diff --git a/package-lock.json b/package-lock.json
index 03b47b17c9..3e2e334995 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -9,28 +9,40 @@
"version": "3.3.0",
"license": "MIT",
"dependencies": {
+ "@codemirror/autocomplete": "^6.16.0",
+ "@codemirror/lang-json": "^6.0.1",
+ "@codemirror/language": "^6.10.1",
+ "@codemirror/lint": "^6.5.0",
"@fontsource/jetbrains-mono": "^5.0.19",
+ "@lezer/generator": "^1.7.0",
+ "@lezer/highlight": "^1.2.0",
+ "@lezer/lr": "^1.4.0",
"@nasa-jpl/aerie-ampcs": "^1.0.5",
"@nasa-jpl/seq-json-schema": "^1.3.1",
"@nasa-jpl/stellar": "^1.1.18",
+ "@neodrag/svelte": "^2.0.6",
"@streamparser/json": "^0.0.17",
"@sveltejs/adapter-node": "5.0.1",
"@sveltejs/kit": "^2.5.4",
+ "@tanstack/svelte-virtual": "^3.11.2",
"ag-grid-community": "32.2.0",
"ajv": "^8.12.0",
"bootstrap": "^5.3.0",
"bootstrap-icons": "^1.11.0",
+ "codemirror": "^6.0.1",
"cookie": "^0.6.0",
"d3-array": "^3.2.4",
"d3-axis": "^3.0.0",
"d3-brush": "^3.0.0",
"d3-drag": "^3.0.0",
+ "d3-format": "^3.1.0",
"d3-quadtree": "^3.0.1",
"d3-scale": "^4.0.2",
"d3-scale-chromatic": "^3.0.0",
"d3-selection": "^3.0.0",
"d3-shape": "^3.2.0",
"d3-time": "^3.1.0",
+ "d3-zoom": "^3.0.0",
"fastest-levenshtein": "^1.0.16",
"graphql-ws": "^5.16.2",
"json-source-map": "^0.6.1",
@@ -49,18 +61,9 @@
"toastify-js": "^1.12.0"
},
"devDependencies": {
- "@codemirror/autocomplete": "^6.16.0",
- "@codemirror/lang-json": "^6.0.1",
- "@codemirror/language": "^6.10.1",
- "@codemirror/lint": "^6.5.0",
- "@lezer/generator": "^1.7.0",
- "@lezer/highlight": "^1.2.0",
- "@lezer/lr": "^1.4.0",
- "@neodrag/svelte": "^2.0.6",
"@playwright/test": "^1.49.1",
"@poppanator/sveltekit-svg": "^4.2.1",
"@sveltejs/vite-plugin-svelte": "^3.0.0",
- "@tanstack/svelte-virtual": "^3.11.2",
"@testing-library/svelte": "^4.0.2",
"@types/cookie": "^0.6.0",
"@types/d3-array": "^3.0.5",
@@ -85,9 +88,6 @@
"@vitest/coverage-v8": "^1.4.0",
"@vitest/ui": "^1.4.0",
"cloc": "2.0.0-cloc",
- "codemirror": "^6.0.1",
- "d3-format": "^3.1.0",
- "d3-zoom": "^3.0.0",
"esbuild": "^0.24.0",
"eslint": "^8.43.0",
"eslint-config-prettier": "^9.1.0",
@@ -300,7 +300,6 @@
"version": "6.16.0",
"resolved": "https://registry.npmjs.org/@codemirror/autocomplete/-/autocomplete-6.16.0.tgz",
"integrity": "sha512-P/LeCTtZHRTCU4xQsa89vSKWecYv1ZqwzOd5topheGRf+qtacFgBeIMQi3eL8Kt/BUNvxUWkx+5qP2jlGoARrg==",
- "dev": true,
"dependencies": {
"@codemirror/language": "^6.0.0",
"@codemirror/state": "^6.0.0",
@@ -318,7 +317,6 @@
"version": "6.3.3",
"resolved": "https://registry.npmjs.org/@codemirror/commands/-/commands-6.3.3.tgz",
"integrity": "sha512-dO4hcF0fGT9tu1Pj1D2PvGvxjeGkbC6RGcZw6Qs74TH+Ed1gw98jmUgd2axWvIZEqTeTuFrg1lEB1KV6cK9h1A==",
- "dev": true,
"dependencies": {
"@codemirror/language": "^6.0.0",
"@codemirror/state": "^6.4.0",
@@ -330,7 +328,6 @@
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/@codemirror/lang-json/-/lang-json-6.0.1.tgz",
"integrity": "sha512-+T1flHdgpqDDlJZ2Lkil/rLiRy684WMLc74xUnjJH48GQdfJo/pudlTRreZmKwzP8/tGdKf83wlbAdOCzlJOGQ==",
- "dev": true,
"dependencies": {
"@codemirror/language": "^6.0.0",
"@lezer/json": "^1.0.0"
@@ -340,7 +337,6 @@
"version": "6.10.1",
"resolved": "https://registry.npmjs.org/@codemirror/language/-/language-6.10.1.tgz",
"integrity": "sha512-5GrXzrhq6k+gL5fjkAwt90nYDmjlzTIJV8THnxNFtNKWotMIlzzN+CpqxqwXOECnUdOndmSeWntVrVcv5axWRQ==",
- "dev": true,
"dependencies": {
"@codemirror/state": "^6.0.0",
"@codemirror/view": "^6.23.0",
@@ -354,7 +350,6 @@
"version": "6.5.0",
"resolved": "https://registry.npmjs.org/@codemirror/lint/-/lint-6.5.0.tgz",
"integrity": "sha512-+5YyicIaaAZKU8K43IQi8TBy6mF6giGeWAH7N96Z5LC30Wm5JMjqxOYIE9mxwMG1NbhT2mA3l9hA4uuKUM3E5g==",
- "dev": true,
"dependencies": {
"@codemirror/state": "^6.0.0",
"@codemirror/view": "^6.0.0",
@@ -365,7 +360,6 @@
"version": "6.5.6",
"resolved": "https://registry.npmjs.org/@codemirror/search/-/search-6.5.6.tgz",
"integrity": "sha512-rpMgcsh7o0GuCDUXKPvww+muLA1pDJaFrpq/CCHtpQJYz8xopu4D1hPcKRoDD0YlF8gZaqTNIRa4VRBWyhyy7Q==",
- "dev": true,
"dependencies": {
"@codemirror/state": "^6.0.0",
"@codemirror/view": "^6.0.0",
@@ -375,14 +369,12 @@
"node_modules/@codemirror/state": {
"version": "6.4.1",
"resolved": "https://registry.npmjs.org/@codemirror/state/-/state-6.4.1.tgz",
- "integrity": "sha512-QkEyUiLhsJoZkbumGZlswmAhA7CBU02Wrz7zvH4SrcifbsqwlXShVXg65f3v/ts57W3dqyamEriMhij1Z3Zz4A==",
- "dev": true
+ "integrity": "sha512-QkEyUiLhsJoZkbumGZlswmAhA7CBU02Wrz7zvH4SrcifbsqwlXShVXg65f3v/ts57W3dqyamEriMhij1Z3Zz4A=="
},
"node_modules/@codemirror/view": {
"version": "6.26.0",
"resolved": "https://registry.npmjs.org/@codemirror/view/-/view-6.26.0.tgz",
"integrity": "sha512-nSSmzONpqsNzshPOxiKhK203R6BvABepugAe34QfQDbNDslyjkqBuKgrK5ZBvqNXpfxz5iLrlGTmEfhbQyH46A==",
- "dev": true,
"dependencies": {
"@codemirror/state": "^6.4.0",
"style-mod": "^4.1.0",
@@ -1072,14 +1064,12 @@
"node_modules/@lezer/common": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/@lezer/common/-/common-1.2.1.tgz",
- "integrity": "sha512-yemX0ZD2xS/73llMZIK6KplkjIjf2EvAHcinDi/TfJ9hS25G0388+ClHt6/3but0oOxinTcQHJLDXh6w1crzFQ==",
- "dev": true
+ "integrity": "sha512-yemX0ZD2xS/73llMZIK6KplkjIjf2EvAHcinDi/TfJ9hS25G0388+ClHt6/3but0oOxinTcQHJLDXh6w1crzFQ=="
},
"node_modules/@lezer/generator": {
"version": "1.7.0",
"resolved": "https://registry.npmjs.org/@lezer/generator/-/generator-1.7.0.tgz",
"integrity": "sha512-IJ16tx3biLKlCXUzcK4v8S10AVa2BSM2rB12rtAL6f1hL2TS/HQQlGCoWRvanlL2J4mCYEEIv9uG7n4kVMkVDA==",
- "dev": true,
"dependencies": {
"@lezer/common": "^1.1.0",
"@lezer/lr": "^1.3.0"
@@ -1092,7 +1082,6 @@
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/@lezer/highlight/-/highlight-1.2.0.tgz",
"integrity": "sha512-WrS5Mw51sGrpqjlh3d4/fOwpEV2Hd3YOkp9DBt4k8XZQcoTHZFB7sx030A6OcahF4J1nDQAa3jXlTVVYH50IFA==",
- "dev": true,
"dependencies": {
"@lezer/common": "^1.0.0"
}
@@ -1101,7 +1090,6 @@
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/@lezer/json/-/json-1.0.2.tgz",
"integrity": "sha512-xHT2P4S5eeCYECyKNPhr4cbEL9tc8w83SPwRC373o9uEdrvGKTZoJVAGxpOsZckMlEh9W23Pc72ew918RWQOBQ==",
- "dev": true,
"dependencies": {
"@lezer/common": "^1.2.0",
"@lezer/highlight": "^1.0.0",
@@ -1112,7 +1100,6 @@
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/@lezer/lr/-/lr-1.4.0.tgz",
"integrity": "sha512-Wst46p51km8gH0ZUmeNrtpRYmdlRHUpN1DQd3GFAyKANi8WVz8c2jHYTf1CVScFaCjQw1iO3ZZdqGDxQPRErTg==",
- "dev": true,
"dependencies": {
"@lezer/common": "^1.0.0"
}
@@ -1138,8 +1125,7 @@
"node_modules/@neodrag/svelte": {
"version": "2.0.6",
"resolved": "https://registry.npmjs.org/@neodrag/svelte/-/svelte-2.0.6.tgz",
- "integrity": "sha512-jjmTjRTMJaer2IyEIoS5xbccmFmOpkeoTKpBORkMItCPjHWE19eW3kvH9SuTvZJAKnKERVNGdW3VBuDxZif9Dg==",
- "dev": true
+ "integrity": "sha512-jjmTjRTMJaer2IyEIoS5xbccmFmOpkeoTKpBORkMItCPjHWE19eW3kvH9SuTvZJAKnKERVNGdW3VBuDxZif9Dg=="
},
"node_modules/@nodelib/fs.scandir": {
"version": "2.1.5",
@@ -1607,7 +1593,6 @@
"version": "3.11.2",
"resolved": "https://registry.npmjs.org/@tanstack/svelte-virtual/-/svelte-virtual-3.11.2.tgz",
"integrity": "sha512-o0VWDf8GlkZ8S5E2GjQq39qhZIB6U1Kej05/aTdxIQy18c022CxYgGWUydmrWJE3DV2ZA/q+Zm30oqnSRhQ4Lw==",
- "dev": true,
"dependencies": {
"@tanstack/virtual-core": "3.11.2"
},
@@ -1623,7 +1608,6 @@
"version": "3.11.2",
"resolved": "https://registry.npmjs.org/@tanstack/virtual-core/-/virtual-core-3.11.2.tgz",
"integrity": "sha512-vTtpNt7mKCiZ1pwU9hfKPhpdVO2sVzFQsxoVBGtOSHxlrRRzYr8iQ2TlwbAcRYCcEiZ9ECAM8kBzH0v2+VzfKw==",
- "dev": true,
"funding": {
"type": "github",
"url": "https://github.com/sponsors/tannerlinsley"
@@ -2744,7 +2728,6 @@
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/codemirror/-/codemirror-6.0.1.tgz",
"integrity": "sha512-J8j+nZ+CdWmIeFIGXEFbFPtpiYacFMDR8GlHK3IyHQJMCaVRfGx9NT+Hxivv1ckLWPvNdZqndbr/7lVhrf/Svg==",
- "dev": true,
"dependencies": {
"@codemirror/autocomplete": "^6.0.0",
"@codemirror/commands": "^6.0.0",
@@ -2859,8 +2842,7 @@
"node_modules/crelt": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/crelt/-/crelt-1.0.6.tgz",
- "integrity": "sha512-VQ2MBenTq1fWZUH9DJNGti7kKv6EeAuYr3cLwxUWhIu1baTaXh4Ib5W2CqHVqib4/MqbYGJqiL3Zb8GJZr3l4g==",
- "dev": true
+ "integrity": "sha512-VQ2MBenTq1fWZUH9DJNGti7kKv6EeAuYr3cLwxUWhIu1baTaXh4Ib5W2CqHVqib4/MqbYGJqiL3Zb8GJZr3l4g=="
},
"node_modules/cross-spawn": {
"version": "7.0.3",
@@ -3190,7 +3172,6 @@
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/d3-zoom/-/d3-zoom-3.0.0.tgz",
"integrity": "sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==",
- "dev": true,
"dependencies": {
"d3-dispatch": "1 - 3",
"d3-drag": "2 - 3",
@@ -6717,8 +6698,7 @@
"node_modules/style-mod": {
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/style-mod/-/style-mod-4.1.2.tgz",
- "integrity": "sha512-wnD1HyVqpJUI2+eKZ+eo1UwghftP6yuFheBqqe+bWCotBjC2K1YnteJILRMs3SM4V/0dLEW1SC27MWP5y+mwmw==",
- "dev": true
+ "integrity": "sha512-wnD1HyVqpJUI2+eKZ+eo1UwghftP6yuFheBqqe+bWCotBjC2K1YnteJILRMs3SM4V/0dLEW1SC27MWP5y+mwmw=="
},
"node_modules/stylelint": {
"version": "16.3.1",
@@ -7651,8 +7631,7 @@
"node_modules/w3c-keyname": {
"version": "2.2.8",
"resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz",
- "integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ==",
- "dev": true
+ "integrity": "sha512-dpojBhNsCNN7T82Tm7k26A6G9ML3NkhDsnw9n/eoxSRlVBB4CEtIQ/KTCLI2Fwf3ataSXRhYFkQi3SlnFwPvPQ=="
},
"node_modules/w3c-xmlserializer": {
"version": "5.0.0",
diff --git a/package.json b/package.json
index 00bd66f2d1..b837c626f9 100644
--- a/package.json
+++ b/package.json
@@ -38,28 +38,40 @@
"version": "node ./scripts/version.js"
},
"dependencies": {
+ "@codemirror/autocomplete": "^6.16.0",
+ "@codemirror/lang-json": "^6.0.1",
+ "@codemirror/language": "^6.10.1",
+ "@codemirror/lint": "^6.5.0",
"@fontsource/jetbrains-mono": "^5.0.19",
+ "@lezer/generator": "^1.7.0",
+ "@lezer/highlight": "^1.2.0",
+ "@lezer/lr": "^1.4.0",
"@nasa-jpl/aerie-ampcs": "^1.0.5",
"@nasa-jpl/seq-json-schema": "^1.3.1",
"@nasa-jpl/stellar": "^1.1.18",
+ "@neodrag/svelte": "^2.0.6",
"@streamparser/json": "^0.0.17",
"@sveltejs/adapter-node": "5.0.1",
"@sveltejs/kit": "^2.5.4",
+ "@tanstack/svelte-virtual": "^3.11.2",
"ag-grid-community": "32.2.0",
"ajv": "^8.12.0",
"bootstrap": "^5.3.0",
"bootstrap-icons": "^1.11.0",
+ "codemirror": "^6.0.1",
"cookie": "^0.6.0",
"d3-array": "^3.2.4",
"d3-axis": "^3.0.0",
"d3-brush": "^3.0.0",
"d3-drag": "^3.0.0",
+ "d3-format": "^3.1.0",
"d3-quadtree": "^3.0.1",
"d3-scale": "^4.0.2",
"d3-scale-chromatic": "^3.0.0",
"d3-selection": "^3.0.0",
"d3-shape": "^3.2.0",
"d3-time": "^3.1.0",
+ "d3-zoom": "^3.0.0",
"fastest-levenshtein": "^1.0.16",
"graphql-ws": "^5.16.2",
"json-source-map": "^0.6.1",
@@ -78,18 +90,9 @@
"toastify-js": "^1.12.0"
},
"devDependencies": {
- "@codemirror/autocomplete": "^6.16.0",
- "@codemirror/lang-json": "^6.0.1",
- "@codemirror/language": "^6.10.1",
- "@codemirror/lint": "^6.5.0",
- "@lezer/generator": "^1.7.0",
- "@lezer/highlight": "^1.2.0",
- "@lezer/lr": "^1.4.0",
"@playwright/test": "^1.49.1",
- "@neodrag/svelte": "^2.0.6",
"@poppanator/sveltekit-svg": "^4.2.1",
"@sveltejs/vite-plugin-svelte": "^3.0.0",
- "@tanstack/svelte-virtual": "^3.11.2",
"@testing-library/svelte": "^4.0.2",
"@types/cookie": "^0.6.0",
"@types/d3-array": "^3.0.5",
@@ -114,9 +117,6 @@
"@vitest/coverage-v8": "^1.4.0",
"@vitest/ui": "^1.4.0",
"cloc": "2.0.0-cloc",
- "codemirror": "^6.0.1",
- "d3-format": "^3.1.0",
- "d3-zoom": "^3.0.0",
"esbuild": "^0.24.0",
"eslint": "^8.43.0",
"eslint-config-prettier": "^9.1.0",
diff --git a/src/components/parcels/ParcelForm.svelte b/src/components/parcels/ParcelForm.svelte
index 45452baea8..b853a15fe7 100644
--- a/src/components/parcels/ParcelForm.svelte
+++ b/src/components/parcels/ParcelForm.svelte
@@ -150,7 +150,7 @@
});
if (parcelToParameterDictionariesToAdd.length > 0) {
- await effects.createParcelToParameterDictionaries(parcelOwner, parcelToParameterDictionariesToAdd, user);
+ await effects.createParcelToParameterDictionaries(parcelToParameterDictionariesToAdd, user);
}
if (parcelToParameterDictionaryIdsToDelete.length > 0) {
diff --git a/src/components/scheduling/goals/SchedulingGoal.svelte b/src/components/scheduling/goals/SchedulingGoal.svelte
index 2b40b882fd..50c5ea228c 100644
--- a/src/components/scheduling/goals/SchedulingGoal.svelte
+++ b/src/components/scheduling/goals/SchedulingGoal.svelte
@@ -81,10 +81,10 @@
}
function onEnable(event: Event) {
- const { value: enabled } = getTarget(event);
+ const { value: enabledUpdate } = getTarget(event);
dispatch('updateGoalPlanSpec', {
...goalPlanSpec,
- enabled: enabled as boolean,
+ enabled: enabledUpdate as boolean,
});
}
@@ -110,10 +110,10 @@
});
}
- function simulateAfter(simulateAfter: boolean) {
+ function simulateAfter(simulateAfterUpdate: boolean) {
dispatch('updateGoalPlanSpec', {
...goalPlanSpec,
- simulate_after: simulateAfter,
+ simulate_after: simulateAfterUpdate,
});
}
@@ -129,10 +129,10 @@
});
}
- function updatePriority(priority: number) {
+ function updatePriority(priorityUpdate: number) {
dispatch('updateGoalPlanSpec', {
...goalPlanSpec,
- priority,
+ priority: priorityUpdate,
});
}
diff --git a/src/components/sequencing/CommandPanel/SelectedCommand.svelte b/src/components/sequencing/CommandPanel/SelectedCommand.svelte
index e8f0bb5291..5b6d8b7e5c 100644
--- a/src/components/sequencing/CommandPanel/SelectedCommand.svelte
+++ b/src/components/sequencing/CommandPanel/SelectedCommand.svelte
@@ -127,8 +127,14 @@
{commandInfoMapper}
{variablesInScope}
setInEditor={debounce((token, val) => setInEditor(editorSequenceView, token, val), 250)}
- addDefaultArgs={(commandNode, missingArgDefArray) =>
- addDefaultArgs(commandDictionary, editorSequenceView, commandNode, missingArgDefArray, commandInfoMapper)}
+ addDefaultArgs={(commandNodeToAddArgs, missingArgDefs) =>
+ addDefaultArgs(
+ commandDictionary,
+ editorSequenceView,
+ commandNodeToAddArgs,
+ missingArgDefs,
+ commandInfoMapper,
+ )}
/>
{/each}
diff --git a/src/components/sequencing/form/EnumEditor.svelte b/src/components/sequencing/form/EnumEditor.svelte
index d0fdafc185..e823c02fd9 100644
--- a/src/components/sequencing/form/EnumEditor.svelte
+++ b/src/components/sequencing/form/EnumEditor.svelte
@@ -6,7 +6,6 @@
import SearchableDropdown from '../../ui/SearchableDropdown.svelte';
const SEARCH_THRESHOLD = 100;
- const MAX_SEARCH_ITEMS = 1_000;
export let argDef: FswCommandArgumentEnum;
export let commandDictionary: CommandDictionary | null = null;
@@ -39,7 +38,6 @@
{#if enumValues.length > SEARCH_THRESHOLD}
();
- let dirtyFilter = structuredClone(filter);
- let currentField = dirtyFilter.field as keyof typeof ActivityLayerFilterFieldType;
+ let dirtyFilter: ActivityLayerDynamicFilter = structuredClone(filter);
+ let currentField: keyof typeof ActivityLayerFilterFieldType =
+ dirtyFilter.field as keyof typeof ActivityLayerFilterFieldType;
let currentOperator: keyof typeof FilterOperator | null = dirtyFilter.operator;
let subfields: ActivityLayerFilterSubfieldSchema[] | undefined = undefined;
let currentSubfieldLabel =
@@ -52,6 +53,7 @@
let currentUnit: string = '';
let currentValuePossibilities: Array<string> = [];
+ $: dirtyFilter = structuredClone(filter);
$: subfields = schema.Parameter?.subfields;
$: if (currentField !== 'Parameter') {
@@ -123,7 +125,7 @@
const {
detail: { tag, type },
} = event;
- let newValue = Array.isArray(currentValue) ? currentValue : [];
+ const newValue = Array.isArray(currentValue) ? currentValue : [];
if (type === 'remove') {
currentValue = (newValue as number[]).filter(tagId => tagId !== tag.id) as number[];
} else if (type === 'select') {
@@ -160,7 +162,7 @@
function onRangeInputChange(event: Event, bound: 'min' | 'max' = 'min') {
const { value } = getTarget(event);
if (typeof value === 'number') {
- let newValue = Array.isArray(currentValue) ? currentValue.slice() : [0, 0];
+ const newValue = Array.isArray(currentValue) ? currentValue.slice() : [0, 0];
if (bound === 'min') {
newValue[0] = value;
} else {
@@ -293,7 +295,7 @@
searchPlaceholder="Filter Variants"
on:change={onSelectValue}
selectedOptionValues={[currentValueAsStringOrNumber]}
- options={currentValuePossibilities.sort().map(value => ({ display: value, value: value }))}
+ options={currentValuePossibilities.sort().map(value => ({ display: value, value }))}
>
diff --git a/src/components/ui/SearchableDropdown.svelte b/src/components/ui/SearchableDropdown.svelte
index 12c3f6ad73..d936aba36e 100644
--- a/src/components/ui/SearchableDropdown.svelte
+++ b/src/components/ui/SearchableDropdown.svelte
@@ -35,7 +35,6 @@
export let error: string | undefined = undefined;
export let hasUpdatePermission: boolean = true;
export let options: DropdownOptions = [];
- export let maxItems: number | undefined = undefined;
export let maxListHeight: string = '300px';
export let name: string | undefined = undefined;
export let updatePermissionError: string = 'You do not have permission to update this';
@@ -110,11 +109,7 @@
: options.filter(option => {
return new RegExp(searchFilter, 'i').test(option.display);
});
- if (maxItems !== undefined) {
- displayedOptions = filteredOptions.slice(0, maxItems);
- } else {
- displayedOptions = filteredOptions;
- }
+ displayedOptions = filteredOptions;
}
$: if (disabled) {
hideMenu();
diff --git a/src/enums/gql.ts b/src/enums/gql.ts
new file mode 100644
index 0000000000..319189d014
--- /dev/null
+++ b/src/enums/gql.ts
@@ -0,0 +1,241 @@
+export enum Queries {
+ ACTIVITY_DIRECTIVES = 'activity_directive',
+ ACTIVITY_DIRECTIVE_CHANGELOG = 'activity_directive_changelog',
+ ACTIVITY_DIRECTIVE_METADATA_SCHEMAS = 'activity_directive_metadata_schema',
+ ACTIVITY_DIRECTIVE_VALIDATIONS = 'activity_directive_validations',
+ ACTIVITY_PRESETS = 'activity_presets',
+ ACTIVITY_TYPES = 'activity_type',
+ ADD_EXTERNAL_DATASET = 'addExternalDataset',
+ ANCHOR_VALIDATION_STATUS = 'anchor_validation_status',
+ APPLY_PRESET_TO_ACTIVITY = 'apply_preset_to_activity',
+ BEGIN_MERGE = 'begin_merge',
+ CANCEL_MERGE = 'cancel_merge',
+ CHANNEL_DICTIONARIES = 'channel_dictionary',
+ COMMAND_DICTIONARIES = 'command_dictionary',
+ COMMIT_MERGE = 'commit_merge',
+ CONSTRAINTS_DSL_TYPESCRIPT = 'constraintsDslTypescript',
+ CONSTRAINT_DEFINITION = 'constraint_definition_by_pk',
+ CONSTRAINT_METADATA = 'constraint_metadata_by_pk',
+ CONSTRAINT_METADATAS = 'constraint_metadata',
+ CONSTRAINT_RUN = 'constraint_run',
+ CONSTRAINT_SPECIFICATIONS = 'constraint_specification',
+ CONSTRAINT_VIOLATIONS = 'constraintViolations',
+ CREATE_EXPANSION_SET = 'createExpansionSet',
+ CREATE_MERGE_REQUEST = 'create_merge_request',
+ CREATE_SNAPSHOT = 'create_snapshot',
+ DELETE_ACTIVITY_DELETE_SUBTREE_BULK = 'delete_activity_by_pk_delete_subtree_bulk',
+ DELETE_ACTIVITY_DIRECTIVES = 'delete_activity_directive',
+ DELETE_ACTIVITY_DIRECTIVE_TAG = 'delete_activity_directive_tags_by_pk', // pluralization is a typo in the db
+ DELETE_ACTIVITY_PRESET = 'delete_activity_presets_by_pk',
+ DELETE_ACTIVITY_REANCHOR_PLAN_START_BULK = 'delete_activity_by_pk_reanchor_plan_start_bulk',
+ DELETE_ACTIVITY_REANCHOR_TO_ANCHOR_BULK = 'delete_activity_by_pk_reanchor_to_anchor_bulk',
+ DELETE_CHANNEL_DICTIONARY = 'delete_channel_dictionary_by_pk',
+ DELETE_COMMAND_DICTIONARY = 'delete_command_dictionary_by_pk',
+ DELETE_CONSTRAINT_DEFINITION_TAGS = 'delete_constraint_definition_tags',
+ DELETE_CONSTRAINT_METADATA = 'delete_constraint_metadata_by_pk',
+ DELETE_CONSTRAINT_MODEL_SPECIFICATIONS = 'delete_constraint_model_specification',
+ DELETE_CONSTRAINT_SPECIFICATIONS = 'delete_constraint_specification',
+ DELETE_CONSTRAINT_TAGS = 'delete_constraint_tags',
+ DELETE_DERIVATION_GROUP = 'delete_derivation_group',
+ DELETE_EXPANSION_RULE = 'delete_expansion_rule_by_pk',
+ DELETE_EXPANSION_RULE_TAGS = 'delete_expansion_rule_tags',
+ DELETE_EXPANSION_SET = 'delete_expansion_set_by_pk',
+ DELETE_EXTERNAL_EVENT = 'delete_external_event',
+ DELETE_EXTERNAL_EVENT_TYPE = 'delete_external_event_type_by_pk',
+ DELETE_EXTERNAL_SOURCE = 'delete_external_source',
+ DELETE_EXTERNAL_SOURCE_TYPE = 'delete_external_source_type_by_pk',
+ DELETE_UPLOADED_FILE = 'delete_uploaded_file_by_pk',
+ DELETE_MISSION_MODEL = 'delete_mission_model_by_pk',
+ DELETE_PARAMETER_DICTIONARY = 'delete_parameter_dictionary_by_pk',
+ DELETE_PARCEL = 'delete_parcel_by_pk',
+ DELETE_PARCEL_TO_DICTIONARY_ASSOCIATION = 'delete_parcel_to_parameter_dictionary',
+ DELETE_PLAN = 'delete_plan_by_pk',
+ DELETE_PLAN_COLLABORATOR = 'delete_plan_collaborators_by_pk',
+ DELETE_PLAN_DERIVATION_GROUP = 'delete_plan_derivation_group',
+ DELETE_PLAN_SNAPSHOT = 'delete_plan_snapshot_by_pk',
+ DELETE_PLAN_TAG = 'delete_plan_tags_by_pk', // pluralization is a typo in the db
+ DELETE_PRESET_TO_DIRECTIVE = 'delete_preset_to_directive_by_pk',
+ DELETE_SCHEDULING_CONDITION_DEFINITION_TAGS = 'delete_scheduling_condition_definition_tags',
+ DELETE_SCHEDULING_CONDITION_METADATA = 'delete_scheduling_condition_metadata_by_pk',
+ DELETE_SCHEDULING_CONDITION_METADATA_TAGS = 'delete_scheduling_condition_tags',
+ DELETE_SCHEDULING_CONDITION_MODEL_SPECIFICATIONS = 'delete_scheduling_model_specification_conditions',
+ DELETE_SCHEDULING_GOAL_DEFINITION_TAGS = 'delete_scheduling_goal_definition_tags',
+ DELETE_SCHEDULING_GOAL_METADATA = 'delete_scheduling_goal_metadata_by_pk',
+ DELETE_SCHEDULING_GOAL_METADATA_TAGS = 'delete_scheduling_goal_tags',
+ DELETE_SCHEDULING_GOAL_MODEL_SPECIFICATIONS = 'delete_scheduling_model_specification_goals',
+ DELETE_SCHEDULING_SPECIFICATION = 'delete_scheduling_specification',
+ DELETE_SCHEDULING_SPECIFICATION_CONDITIONS = 'delete_scheduling_specification_conditions',
+ DELETE_SCHEDULING_SPECIFICATION_GOALS = 'delete_scheduling_specification_goals',
+ DELETE_SEQUENCE = 'delete_sequence_by_pk',
+ DELETE_SEQUENCE_ADAPTATION = 'delete_sequence_adaptation_by_pk',
+ DELETE_SEQUENCE_TO_SIMULATED_ACTIVITY = 'delete_sequence_to_simulated_activity_by_pk',
+ DELETE_SIMULATION_TEMPLATE = 'delete_simulation_template_by_pk',
+ DELETE_TAG = 'delete_tags_by_pk',
+ DELETE_USER_SEQUENCE = 'delete_user_sequence_by_pk',
+ DELETE_VIEW = 'delete_view_by_pk',
+ DELETE_VIEWS = 'delete_view',
+ DENY_MERGE = 'deny_merge',
+ DUPLICATE_PLAN = 'duplicate_plan',
+ EVENT = 'event',
+ EXPAND_ALL_ACTIVITIES = 'expandAllActivities',
+ EXPANSION_RULE = 'expansion_rule_by_pk',
+ EXPANSION_RULES = 'expansion_rule',
+ EXPANSION_RULE_TAGS = 'expansion_rule_tags',
+ EXPANSION_RUNS = 'expansion_run',
+ EXPANSION_SETS = 'expansion_set',
+ EXTENSIONS = 'extensions',
+ DERIVED_EVENTS = 'derived_events',
+ EXTERNAL_EVENT = 'external_event',
+ EXTERNAL_EVENT_TYPES = 'external_event_type',
+ EXTERNAL_SOURCE_EVENT_TYPES = 'external_source_event_type',
+ EXTERNAL_SOURCE = 'external_source_by_pk',
+ EXTERNAL_SOURCES = 'external_source',
+ EXTERNAL_SOURCE_TYPES = 'external_source_type',
+ DERIVATION_GROUP = 'derivation_group',
+ DERIVATION_GROUP_COMP = 'derivation_group_comp',
+ PLAN_DERIVATION_GROUP = 'plan_derivation_group',
+ GET_ACTIVITY_EFFECTIVE_ARGUMENTS_BULK = 'getActivityEffectiveArgumentsBulk',
+ GET_ACTIVITY_TYPE_SCRIPT = 'getActivityTypeScript',
+ GET_COMMAND_TYPE_SCRIPT = 'getCommandTypeScript',
+ GET_CONFLICTING_ACTIVITIES = 'get_conflicting_activities',
+ GET_EDSL_FOR_SEQ_JSON = 'getEdslForSeqJson',
+ GET_MODEL_EFFECTIVE_ARGUMENTS = 'getModelEffectiveArguments',
+ GET_NON_CONFLICTING_ACTIVITIES = 'get_non_conflicting_activities',
+ GET_SEQUENCE_SEQ_JSON = 'getSequenceSeqJson',
+ GET_USER_SEQUENCE_SEQ_JSON = 'getUserSequenceSeqJson',
+ INSERT_ACTIVITY_DIRECTIVE = 'insert_activity_directive_one',
+ INSERT_ACTIVITY_DIRECTIVES = 'insert_activity_directive',
+ INSERT_ACTIVITY_DIRECTIVE_TAGS = 'insert_activity_directive_tags',
+ INSERT_ACTIVITY_PRESET = 'insert_activity_presets_one',
+ INSERT_CHANNEL_DICTIONARY = 'insert_channel_dictionary_one',
+ INSERT_DERIVATION_GROUP = 'insert_derivation_group_one',
+ INSERT_DICTIONARY = 'insert_dictionary_one',
+ INSERT_CONSTRAINT_DEFINITION = 'insert_constraint_definition_one',
+ INSERT_CONSTRAINT_DEFINITION_TAGS = 'insert_constraint_definition_tags',
+ INSERT_CONSTRAINT_METADATA = 'insert_constraint_metadata_one',
+ INSERT_CONSTRAINT_MODEL_SPECIFICATION = 'insert_constraint_model_specification_one',
+ INSERT_CONSTRAINT_MODEL_SPECIFICATIONS = 'insert_constraint_model_specification',
+ INSERT_CONSTRAINT_SPECIFICATIONS = 'insert_constraint_specification',
+ INSERT_CONSTRAINT_TAGS = 'insert_constraint_tags',
+ INSERT_EXPANSION_RULE = 'insert_expansion_rule_one',
+ INSERT_EXPANSION_RULE_TAGS = 'insert_expansion_rule_tags',
+ INSERT_EXTERNAL_EVENT_TYPE = 'insert_external_event_type',
+ INSERT_EXTERNAL_EVENT_TYPE_ONE = 'insert_external_event_type_one',
+ INSERT_EXTERNAL_SOURCE = 'insert_external_source_one',
+ INSERT_EXTERNAL_SOURCE_TYPE = 'insert_external_source_type_one',
+ INSERT_MISSION_MODEL = 'insert_mission_model_one',
+ INSERT_PARAMETER_DICTIONARY = 'insert_parameter_dictionary_one',
+ INSERT_PARCEL = 'insert_parcel_one',
+ INSERT_PARCEL_TO_PARAMETER_DICTIONARY = 'insert_parcel_to_parameter_dictionary',
+ INSERT_PLAN = 'insert_plan_one',
+ INSERT_PLAN_DERIVATION_GROUP = 'insert_plan_derivation_group_one',
+ INSERT_PLAN_SNAPSHOT_TAGS = 'insert_plan_snapshot_tags',
+ INSERT_PLAN_COLLABORATORS = 'insert_plan_collaborators',
+ INSERT_PLAN_TAGS = 'insert_plan_tags',
+ INSERT_SCHEDULING_CONDITION_DEFINITION = 'insert_scheduling_condition_definition_one',
+ INSERT_SCHEDULING_CONDITION_DEFINITION_TAGS = 'insert_scheduling_condition_definition_tags',
+ INSERT_SCHEDULING_CONDITION_METADATA = 'insert_scheduling_condition_metadata_one',
+ INSERT_SCHEDULING_CONDITION_TAGS = 'insert_scheduling_condition_tags',
+ INSERT_SCHEDULING_GOAL_DEFINITION = 'insert_scheduling_goal_definition_one',
+ INSERT_SCHEDULING_GOAL_DEFINITION_TAGS = 'insert_scheduling_goal_definition_tags',
+ INSERT_SCHEDULING_GOAL_METADATA = 'insert_scheduling_goal_metadata_one',
+ INSERT_SCHEDULING_GOAL_TAGS = 'insert_scheduling_goal_tags',
+ INSERT_SCHEDULING_MODEL_SPECIFICATION_CONDITIONS = 'insert_scheduling_model_specification_conditions',
+ INSERT_SCHEDULING_MODEL_SPECIFICATION_GOALS = 'insert_scheduling_model_specification_goals',
+ INSERT_SCHEDULING_SPECIFICATION = 'insert_scheduling_specification_one',
+ INSERT_SCHEDULING_SPECIFICATION_CONDITION = 'insert_scheduling_specification_conditions_one',
+ INSERT_SCHEDULING_SPECIFICATION_CONDITIONS = 'insert_scheduling_specification_conditions',
+ INSERT_SCHEDULING_SPECIFICATION_GOAL = 'insert_scheduling_specification_goals_one',
+ INSERT_SCHEDULING_SPECIFICATION_GOALS = 'insert_scheduling_specification_goals',
+ INSERT_SEQUENCE = 'insert_sequence_one',
+ INSERT_SEQUENCE_ADAPTATION = 'insert_sequence_adaptation_one',
+ INSERT_SEQUENCE_TO_SIMULATED_ACTIVITY = 'insert_sequence_to_simulated_activity_one',
+ INSERT_SIMULATION_TEMPLATE = 'insert_simulation_template_one',
+ INSERT_TAG = 'insert_tags_one',
+ INSERT_TAGS = 'insert_tags',
+ INSERT_USER_SEQUENCE = 'insert_user_sequence_one',
+ INSERT_VIEW = 'insert_view_one',
+ INSERT_WORKSPACE = 'insert_workspace_one',
+ MERGE_REQUEST = 'merge_request_by_pk',
+ MERGE_REQUESTS = 'merge_request',
+ MISSION_MODEL = 'mission_model_by_pk',
+ MISSION_MODELS = 'mission_model',
+ PARAMETER_DICTIONARIES = 'parameter_dictionary',
+ PARCEL = 'parcel_by_pk',
+ PARCELS = 'parcel',
+ PARCEL_TO_PARAMETER_DICTIONARY = 'parcel_to_parameter_dictionary',
+ PLAN = 'plan_by_pk',
+ PLANS = 'plan',
+ PLAN_DATASETS = 'plan_dataset',
+ PLAN_SNAPSHOTS = 'plan_snapshot',
+ PLAN_SNAPSHOT_ACTIVITIES = 'plan_snapshot_activities',
+ PROFILES = 'profile',
+ RESOURCE_TYPES = 'resource_type',
+ RESTORE_ACTIVITY_FROM_CHANGELOG = 'restoreActivityFromChangelog',
+ RESTORE_FROM_SNAPSHOT = 'restore_from_snapshot',
+ SCHEDULE = 'schedule',
+ SCHEDULING_CONDITION_METADATA = 'scheduling_condition_metadata_by_pk',
+ SCHEDULING_CONDITION_METADATAS = 'scheduling_condition_metadata',
+ SCHEDULING_DSL_TYPESCRIPT = 'schedulingDslTypescript',
+ SCHEDULING_GOAL_METADATA = 'scheduling_goal_metadata_by_pk',
+ SCHEDULING_GOAL_METADATAS = 'scheduling_goal_metadata',
+ SCHEDULING_REQUESTS = 'scheduling_request',
+ SCHEDULING_SPECIFICATION = 'scheduling_specification_by_pk',
+ SCHEDULING_SPECIFICATION_CONDITIONS = 'scheduling_specification_conditions',
+ SCHEDULING_SPECIFICATION_GOALS = 'scheduling_specification_goals',
+ SEEN_SOURCES = 'seen_sources',
+ SEQUENCE = 'sequence',
+ SEQUENCE_ADAPTATION = 'sequence_adaptation',
+ SEQUENCE_TO_SIMULATED_ACTIVITY = 'sequence_to_simulated_activity_by_pk',
+ SET_RESOLUTION = 'set_resolution',
+ SET_RESOLUTIONS = 'set_resolution_bulk',
+ SIMULATE = 'simulate',
+ SIMULATIONS = 'simulation',
+ SIMULATION_DATASET = 'simulation_dataset_by_pk',
+ SIMULATION_DATASETS = 'simulation_dataset',
+ SIMULATION_TEMPLATES = 'simulation_template',
+ SPANS = 'span',
+ TAGS = 'tags',
+ TOPIC = 'topic',
+ UPDATE_ACTIVITY_DIRECTIVE = 'update_activity_directive_by_pk',
+ UPDATE_ACTIVITY_DIRECTIVES = 'update_activity_directive_many',
+ UPDATE_ACTIVITY_PRESET = 'update_activity_presets_by_pk',
+ UPDATE_CONSTRAINT_METADATA = 'update_constraint_metadata_by_pk',
+ UPDATE_CONSTRAINT_SPECIFICATION = 'update_constraint_specification_by_pk',
+ UPDATE_CONSTRAINT_MODEL_SPECIFICATION = 'update_constraint_model_specification_by_pk',
+ UPDATE_DERIVATION_GROUP_ACKNOWLEDGED = 'update_plan_derivation_group_by_pk',
+ UPDATE_EXPANSION_RULE = 'update_expansion_rule_by_pk',
+ UPDATE_MISSION_MODEL = 'update_mission_model_by_pk',
+ UPDATE_PARCEL = 'update_parcel_by_pk',
+ UPDATE_PLAN_SNAPSHOT = 'update_plan_snapshot_by_pk',
+ UPDATE_PLAN = 'update_plan_by_pk',
+ UPDATE_SCHEDULING_CONDITION_METADATA = 'update_scheduling_condition_metadata_by_pk',
+ UPDATE_SCHEDULING_GOAL_METADATA = 'update_scheduling_goal_metadata_by_pk',
+ UPDATE_SCHEDULING_REQUEST = 'update_scheduling_request',
+ UPDATE_SCHEDULING_SPECIFICATION = 'update_scheduling_specification_by_pk',
+ UPDATE_SCHEDULING_SPECIFICATION_CONDITION = 'update_scheduling_specification_conditions_by_pk',
+ UPDATE_SCHEDULING_SPECIFICATION_GOAL = 'update_scheduling_specification_goals_by_pk',
+ UPDATE_SCHEDULING_CONDITION_MODEL_SPECIFICATION = 'update_scheduling_model_specification_conditions_by_pk',
+ UPDATE_SCHEDULING_CONDITION_MODEL_SPECIFICATIONS = 'update_scheduling_model_specification_conditions',
+ UPDATE_SCHEDULING_GOAL_MODEL_SPECIFICATION = 'update_scheduling_model_specification_goals_by_pk',
+ UPDATE_SCHEDULING_GOAL_MODEL_SPECIFICATIONS = 'update_scheduling_model_specification_goals',
+ UPDATE_SIMULATION = 'update_simulation_by_pk',
+ UPDATE_SIMULATIONS = 'update_simulation',
+ UPDATE_SIMULATION_DATASET = 'update_simulation_dataset_by_pk',
+ UPDATE_SIMULATION_TEMPLATE = 'update_simulation_template_by_pk',
+ UPDATE_TAGS = 'update_tags_by_pk',
+ UPDATE_USER_SEQUENCE = 'update_user_sequence_by_pk',
+ UPDATE_VIEW = 'update_view_by_pk',
+ UPDATE_WORKSPACE = 'update_workspace_by_pk',
+ UPLOADED_FILES = 'uploaded_file',
+ UPLOAD_DICTIONARY = 'uploadDictionary',
+ USER_ROLE_PERMISSION = 'user_role_permission',
+ USER_SEQUENCE = 'user_sequence_by_pk',
+ USER_SEQUENCES = 'user_sequence',
+ USERS = 'users',
+ VALIDATE_ACTIVITY_ARGUMENTS = 'validateActivityArguments',
+ VIEW = 'view_by_pk',
+ VIEWS = 'view',
+ WORKSPACES = 'workspace',
+ WITHDRAW_MERGE_REQUEST = 'withdraw_merge_request',
+}
diff --git a/src/utilities/codemirror/cdlDictionary.test.ts b/src/utilities/codemirror/cdlDictionary.test.ts
index a3c550721b..d4f39af24d 100644
--- a/src/utilities/codemirror/cdlDictionary.test.ts
+++ b/src/utilities/codemirror/cdlDictionary.test.ts
@@ -151,10 +151,10 @@ describe('cdl parse tests', async () => {
expect(arg1Range?.min).toBe(-255);
expect(arg1Range?.max).toBe(255);
- const arg_2_string: FswCommandArgumentVarString = cdlDictionary.fswCommands[1].argumentMap
+ const arg2String: FswCommandArgumentVarString = cdlDictionary.fswCommands[1].argumentMap
.arg_2_string as FswCommandArgumentVarString;
- expect(arg_2_string.arg_type).toBe('var_string');
- expect(arg_2_string.max_bit_length).toBe(312);
+ expect(arg2String.arg_type).toBe('var_string');
+ expect(arg2String.max_bit_length).toBe(312);
expect(cdlDictionary.fswCommands[1].description).toEqual('Test Command with 3 arguments');
@@ -181,16 +181,16 @@ describe('cdl parse tests', async () => {
expect(cdlDictionary.fswCommands.length).toBe(2);
- const cmd_0 = cdlDictionary.fswCommands[0];
- expect(cmd_0.arguments.length).toBe(2);
+ const cmd0 = cdlDictionary.fswCommands[0];
+ expect(cmd0.arguments.length).toBe(2);
- const cmd_1 = cdlDictionary.fswCommands[1];
- expect(cmd_1.arguments.length).toBe(2);
+ const cmd1 = cdlDictionary.fswCommands[1];
+ expect(cmd1.arguments.length).toBe(2);
- const cmd_1_arg_1 = cmd_1.argumentMap.numeric_arg_2 as FswCommandArgumentInteger;
- expect(cmd_1_arg_1.arg_type).toBe('integer');
+ const cmd1Arg1 = cmd1.argumentMap.numeric_arg_2 as FswCommandArgumentInteger;
+ expect(cmd1Arg1.arg_type).toBe('integer');
- const localEnum = cmd_1.argumentMap.lookup_local_arg_1 as FswCommandArgumentEnum;
+ const localEnum = cmd1.argumentMap.lookup_local_arg_1 as FswCommandArgumentEnum;
expect(localEnum.arg_type).toBe('enum');
expect(localEnum.range).toEqual(['MODE_A', 'MODE_B', 'MODE_C']);
expect(localEnum.description).toBe('Only used by stem CMD_DEBUG');
diff --git a/src/utilities/codemirror/cdlDictionary.ts b/src/utilities/codemirror/cdlDictionary.ts
index 0eb936e55e..aac607223a 100644
--- a/src/utilities/codemirror/cdlDictionary.ts
+++ b/src/utilities/codemirror/cdlDictionary.ts
@@ -22,16 +22,16 @@ const ARG_TITLE = /^\s*TITLE\s*:\s*"(.*)"/;
export function parseCdlDictionary(contents: string, id?: string, path?: string): CommandDictionary {
const lines = contents.split('\n').filter(line => line.trim());
- let mission_name = '';
+ let missionName = '';
let version = '';
- const spacecraft_ids: number[] = [];
+ const spacecraftIds: number[] = [];
const lineIterator = lines.values();
for (const line of lineIterator) {
const projectMatch = line.match(/^PROJECT\s*:\s*"([^"]*)"/);
if (projectMatch) {
- mission_name = projectMatch[1];
+ missionName = projectMatch[1];
break;
}
}
@@ -52,16 +52,16 @@ export function parseCdlDictionary(contents: string, id?: string, path?: string)
}
const spacecraftIdMatch = childLine.match(/^\s*(\d+)\s*=\s*'[\dA-Fa-f]+'/);
if (spacecraftIdMatch) {
- spacecraft_ids.push(parseInt(spacecraftIdMatch[1]));
+ spacecraftIds.push(parseInt(spacecraftIdMatch[1]));
}
}
}
}
const header: Readonly<Header> = {
- mission_name,
+ mission_name: missionName,
schema_version: '1.0',
- spacecraft_ids,
+ spacecraft_ids: spacecraftIds,
version,
};
@@ -211,10 +211,10 @@ export function parseNumericArgument(lines: string[]): FswCommandArgument {
let conversion = '';
let range: NumericRange | null = null;
- let bit_length: number | null = null;
- let default_value_string: string | null = null;
+ let bitLength: number | null = null;
+ let defaultValueString: string | null = null;
let description: string = '';
- let default_value: number | null = null;
+ let defaultValue: number | null = null;
for (const line of lines) {
if (line.match(END_NUMERIC_ARG)) {
@@ -225,7 +225,7 @@ export function parseNumericArgument(lines: string[]): FswCommandArgument {
if (conversion.includes('DECIMAL')) {
const defaultMatch = line.match(/^\s*DEFAULT\s*:\s*'(-?\d+)'/);
if (defaultMatch) {
- default_value = parseInt(defaultMatch[1], 10);
+ defaultValue = parseInt(defaultMatch[1], 10);
}
range = {
max: parseInt(rangeMatch[2], 10),
@@ -234,7 +234,7 @@ export function parseNumericArgument(lines: string[]): FswCommandArgument {
} else if (conversion === 'HEX') {
const defaultMatch = line.match(/^\s*DEFAULT\s*:\s*'([\dA-Fa-f]+)'/);
if (defaultMatch) {
- default_value = parseInt(defaultMatch[1], 16);
+ defaultValue = parseInt(defaultMatch[1], 16);
}
range = {
max: parseInt(rangeMatch[2], 16),
@@ -243,7 +243,7 @@ export function parseNumericArgument(lines: string[]): FswCommandArgument {
} else if (conversion === 'IEEE64FLOAT') {
const defaultMatch = line.match(/^\s*DEFAULT\s*:\s*'(.*)'/);
if (defaultMatch) {
- default_value = parseFloat(defaultMatch[1]);
+ defaultValue = parseFloat(defaultMatch[1]);
}
range = {
max: Number.MAX_VALUE,
@@ -255,14 +255,14 @@ export function parseNumericArgument(lines: string[]): FswCommandArgument {
// LENGTH : 1024
const maxBitMatch = line.match(/^\s*LENGTH\s*:\s*(\d+)/);
if (maxBitMatch) {
- bit_length = parseInt(maxBitMatch[1], 10);
+ bitLength = parseInt(maxBitMatch[1], 10);
}
// DEFAULT : ''
// doesn't handle escaped quotes
const defaultMatch = line.match(/^\s*DEFAULT\s*:\s*'(.*)'/);
if (defaultMatch) {
- default_value_string = defaultMatch[1];
+ defaultValueString = defaultMatch[1];
}
}
} else {
@@ -281,9 +281,9 @@ export function parseNumericArgument(lines: string[]): FswCommandArgument {
if (conversion === 'ASCII_STRING') {
return {
arg_type: 'var_string',
- default_value: default_value_string,
+ default_value: defaultValueString,
description,
- max_bit_length: bit_length,
+ max_bit_length: bitLength,
name,
prefix_bit_length: null,
valid_regex: null,
@@ -291,8 +291,8 @@ export function parseNumericArgument(lines: string[]): FswCommandArgument {
} else if (conversion.includes('DECIMAL') || conversion === 'HEX' || conversion === 'MPFTIME') {
return {
arg_type: 'integer',
- bit_length,
- default_value,
+ bit_length: bitLength,
+ default_value: defaultValue,
description,
name,
range,
@@ -302,8 +302,8 @@ export function parseNumericArgument(lines: string[]): FswCommandArgument {
return {
arg_type: 'float',
- bit_length,
- default_value,
+ bit_length: bitLength,
+ default_value: defaultValue,
description,
name,
range,
@@ -324,7 +324,7 @@ export function parseLookupArgument(lines: string[], namespace?: string): [FswCo
let description = '';
let conversion = '';
- let bit_length: null | number = null;
+ let bitLength: null | number = null;
const values: EnumValue[] = [];
for (const line of lines) {
if (line.match(END_LOOKUP_ARG)) {
@@ -333,7 +333,7 @@ export function parseLookupArgument(lines: string[], namespace?: string): [FswCo
const lengthMatch = line.match(/^\s*LENGTH\s*:\s*(\d+)/);
if (lengthMatch) {
- bit_length = parseInt(lengthMatch[1], 10);
+ bitLength = parseInt(lengthMatch[1], 10);
continue;
}
@@ -367,19 +367,19 @@ export function parseLookupArgument(lines: string[], namespace?: string): [FswCo
}
}
- const enum_name = namespace ? `__${namespace}_${name}` : name;
+ const enumName = namespace ? `__${namespace}_${name}` : name;
return [
{
arg_type: 'enum',
- bit_length,
+ bit_length: bitLength,
default_value: null,
description,
- enum_name,
+ enum_name: enumName,
name,
range: values.map(value => value.symbol),
},
{
- name: enum_name,
+ name: enumName,
values,
},
];
diff --git a/src/utilities/codemirror/satf/satf-sasf-utils.ts b/src/utilities/codemirror/satf/satf-sasf-utils.ts
index 523c762b36..394af64b6a 100644
--- a/src/utilities/codemirror/satf/satf-sasf-utils.ts
+++ b/src/utilities/codemirror/satf/satf-sasf-utils.ts
@@ -252,10 +252,10 @@ function parseTime(
const splitParentNodeText = parentNodeText.slice(1, parentNodeText.length).split(' ');
if (splitParentNodeText.length > 0) {
- const tag = splitParentNodeText[0];
+ const epochTag = splitParentNodeText[0];
const epochName = unquoteUnescape(splitParentNodeText[1]);
- return { tag: `${epochName}${tag}`, type: 'GROUND_EPOCH' };
+ return { tag: `${epochName}${epochTag}`, type: 'GROUND_EPOCH' };
}
}
} else {
@@ -747,8 +747,8 @@ function parseParameters(
console.log(`type: ${type} is not supported`);
}
- const allowable_values: string[] = [];
- const allowable_ranges: string[] = [];
+ const allowableValues: string[] = [];
+ const allowableRanges: string[] = [];
rangesNode.forEach((range: any) => {
text
.slice(range.from, range.to)
@@ -756,14 +756,14 @@ function parseParameters(
.forEach(r => {
r = r.replaceAll('"', '').trim();
if (r.includes('...')) {
- allowable_ranges.push(r);
+ allowableRanges.push(r);
} else {
- allowable_values.push(r);
+ allowableValues.push(r);
}
});
});
- return `${name} ${type}${enumName}${allowable_ranges.length === 0 ? (allowable_values.length === 0 ? '' : ' ""') : ` "${allowable_ranges.join(', ')}"`}${allowable_values.length === 0 ? '' : ` "${allowable_values.join(', ')}"`}`;
+ return `${name} ${type}${enumName}${allowableRanges.length === 0 ? (allowableValues.length === 0 ? '' : ' ""') : ` "${allowableRanges.join(', ')}"`}${allowableValues.length === 0 ? '' : ` "${allowableValues.join(', ')}"`}`;
})
.join('\n');
parameter += `\n@${variableType}_END\n\n`;
diff --git a/src/utilities/codemirror/satf/satf-sasf.grammar b/src/utilities/codemirror/satf/satf-sasf.grammar
index e40037d42f..5e9a4865c7 100644
--- a/src/utilities/codemirror/satf/satf-sasf.grammar
+++ b/src/utilities/codemirror/satf/satf-sasf.grammar
@@ -86,7 +86,7 @@
ActivityTypeCode { identifier }
ReturnType { identifier }
Flags { Flag+ }
- Flag { identifier | ("|" identifier) } //todo
+ Flag { identifier | ("|" identifier) }
Help { (anyASCII | newLine)+ }
SeqgenText { anyASCII }
VirtualChannel { identifier}
diff --git a/src/utilities/codemirror/vml/vmlAdaptation.ts b/src/utilities/codemirror/vml/vmlAdaptation.ts
index 9bcf389a4f..6c87b37740 100644
--- a/src/utilities/codemirror/vml/vmlAdaptation.ts
+++ b/src/utilities/codemirror/vml/vmlAdaptation.ts
@@ -397,7 +397,7 @@ export function getDefaultArgumentValue(argDef: FswCommandArgument, enumMap: Enu
return '""';
}
-export function parseFunctionSignatures(contents: string, workspace_id: number): LibrarySequence[] {
+export function parseFunctionSignatures(contents: string, workspaceId: number): LibrarySequence[] {
return vmlBlockLibraryToCommandDictionary(contents).fswCommands.map(
(fswCommand): LibrarySequence => ({
name: fswCommand.stem,
@@ -409,7 +409,7 @@ export function parseFunctionSignatures(contents: string, workspace_id: number):
}),
tree: VmlLanguage.parser.parse(contents),
type: SequenceTypes.LIBRARY,
- workspace_id,
+ workspace_id: workspaceId,
}),
);
}
diff --git a/src/utilities/codemirror/vml/vmlBlockLibrary.ts b/src/utilities/codemirror/vml/vmlBlockLibrary.ts
index 47e5672801..158d46c9c2 100644
--- a/src/utilities/codemirror/vml/vmlBlockLibrary.ts
+++ b/src/utilities/codemirror/vml/vmlBlockLibrary.ts
@@ -59,14 +59,14 @@ export function vmlBlockLibraryToCommandDictionary(vml: string, id?: string, pat
),
].filter(filterEmpty);
- const mission_name = '';
- const spacecraft_ids = [0];
+ const missionName = '';
+ const spacecraftIds = [0];
const version = '';
const header: Readonly<Header> = {
- mission_name,
+ mission_name: missionName,
schema_version: '1.0',
- spacecraft_ids,
+ spacecraft_ids: spacecraftIds,
version,
};
@@ -114,7 +114,7 @@ function inputToArgument(parameterNode: SyntaxNode, vml: string): FswCommandArgu
return null;
}
- const default_value: number | string | null = parseDefaultValue(parameterNode.firstChild, vml);
+ const defaultValue: number | string | null = parseDefaultValue(parameterNode.firstChild, vml);
const description = parameterNodeToDescription(parameterNode, vml);
const units = ''; // not specified in VML
const range = parseRange(parameterNode.firstChild, vml);
@@ -127,7 +127,7 @@ function inputToArgument(parameterNode: SyntaxNode, vml: string): FswCommandArgu
case TOKEN_UINT:
case TOKEN_INT:
case TOKEN_DOUBLE: {
- const arg_type: 'float' | 'integer' | 'unsigned' = (
+ const argType: 'float' | 'integer' | 'unsigned' = (
{
[TOKEN_DOUBLE]: 'float',
[TOKEN_INT]: 'integer',
@@ -135,12 +135,12 @@ function inputToArgument(parameterNode: SyntaxNode, vml: string): FswCommandArgu
} as const
)[dataKindNode.name];
- const bit_length: number = dataKindNode.name === TOKEN_DOUBLE ? 64 : 32;
+ const bitLength: number = dataKindNode.name === TOKEN_DOUBLE ? 64 : 32;
return {
- arg_type,
- bit_length,
- default_value: typeof default_value === 'number' ? default_value : null,
+ arg_type: argType,
+ bit_length: bitLength,
+ default_value: typeof defaultValue === 'number' ? defaultValue : null,
description,
name,
range: isNumericRange(range) ? range : null,
@@ -150,7 +150,7 @@ function inputToArgument(parameterNode: SyntaxNode, vml: string): FswCommandArgu
case TOKEN_STRING: {
return {
arg_type: 'var_string',
- default_value: typeof default_value === 'string' ? default_value : null,
+ default_value: typeof defaultValue === 'string' ? defaultValue : null,
description,
max_bit_length: null,
name,
@@ -164,7 +164,7 @@ function inputToArgument(parameterNode: SyntaxNode, vml: string): FswCommandArgu
return {
arg_type: 'time',
bit_length: 32,
- default_value,
+ default_value: defaultValue,
description,
name,
units,
@@ -266,8 +266,8 @@ function variableToParam(
| FswCommandArgumentInteger
| FswCommandArgumentVarString
| FswCommandArgumentUnsigned {
- const bit_length = null;
- const default_value = null;
+ const bitLength = null;
+ const defaultValue = null;
const description = '';
const name = variable.name;
const range = null;
@@ -276,8 +276,8 @@ function variableToParam(
case 'ENUM':
return {
arg_type: 'enum',
- bit_length,
- default_value,
+ bit_length: bitLength,
+ default_value: defaultValue,
description,
enum_name: name,
name,
@@ -286,7 +286,7 @@ function variableToParam(
case 'STRING':
return {
arg_type: 'var_string',
- default_value,
+ default_value: defaultValue,
description,
max_bit_length: null,
name,
@@ -296,8 +296,8 @@ function variableToParam(
case 'INT':
return {
arg_type: 'integer',
- bit_length,
- default_value,
+ bit_length: bitLength,
+ default_value: defaultValue,
description,
name,
range,
@@ -306,8 +306,8 @@ function variableToParam(
case 'UINT':
return {
arg_type: 'unsigned',
- bit_length,
- default_value,
+ bit_length: bitLength,
+ default_value: defaultValue,
description,
name,
range,
@@ -316,8 +316,8 @@ function variableToParam(
case 'FLOAT':
return {
arg_type: 'float',
- bit_length,
- default_value,
+ bit_length: bitLength,
+ default_value: defaultValue,
description,
name,
range,
diff --git a/src/utilities/codemirror/vml/vmlFolder.ts b/src/utilities/codemirror/vml/vmlFolder.ts
index ded303e59b..bb5158ce33 100644
--- a/src/utilities/codemirror/vml/vmlFolder.ts
+++ b/src/utilities/codemirror/vml/vmlFolder.ts
@@ -123,7 +123,7 @@ export function computeBlocks(state: EditorState): TreeState {
};
stack.push({
- node: node,
+ node,
stem: category,
});
}
diff --git a/src/utilities/codemirror/vml/vmlLinter.ts b/src/utilities/codemirror/vml/vmlLinter.ts
index 1008d6d7d1..031303dbc4 100644
--- a/src/utilities/codemirror/vml/vmlLinter.ts
+++ b/src/utilities/codemirror/vml/vmlLinter.ts
@@ -70,29 +70,29 @@ function validateGlobals(input: string, tree: Tree, globals: GlobalType[]): Diag
// for each block, sequence, etc -- determine what variables are declared
const declaredVariables: { [to: number]: Set<string> } = {};
- for (const node of filterNodes(tree.cursor(), node => node.name === RULE_TIME_TAGGED_STATEMENTS)) {
- declaredVariables[node.from] = new Set(getVmlVariables(input, tree, node.to));
+ for (const filteredNode of filterNodes(tree.cursor(), node => node.name === RULE_TIME_TAGGED_STATEMENTS)) {
+ declaredVariables[filteredNode.from] = new Set(getVmlVariables(input, tree, filteredNode.to));
}
// check all variables
- for (const node of filterNodes(tree.cursor(), node => node.name === RULE_VARIABLE_NAME)) {
+ for (const filteredNode of filterNodes(tree.cursor(), node => node.name === RULE_VARIABLE_NAME)) {
if (diagnostics.length >= 10) {
// stop checking to avoid flood of errors if adaptation is misconfigured
break;
}
- if (getNearestAncestorNodeOfType(node, [RULE_PARAMETER, RULE_VARIABLE_DECLARATION_TYPE])) {
+ if (getNearestAncestorNodeOfType(filteredNode, [RULE_PARAMETER, RULE_VARIABLE_DECLARATION_TYPE])) {
// don't check variable declarations
continue;
}
- const variableReference = input.slice(node.from, node.to);
+ const variableReference = input.slice(filteredNode.from, filteredNode.to);
if (globalNames.has(variableReference)) {
// matches global
continue;
}
- const timeTaggedStatementsNode = getNearestAncestorNodeOfType(node, [RULE_TIME_TAGGED_STATEMENTS]);
+ const timeTaggedStatementsNode = getNearestAncestorNodeOfType(filteredNode, [RULE_TIME_TAGGED_STATEMENTS]);
const variablesInScope = timeTaggedStatementsNode ? declaredVariables[timeTaggedStatementsNode.from] : new Set<string>([]);
if (variablesInScope.has(variableReference)) {
// matches local
@@ -101,7 +101,7 @@ function validateGlobals(input: string, tree: Tree, globals: GlobalType[]): Diag
const symbolsInScope = [...Array.from(variablesInScope), ...Array.from(globalNames)];
const alternative = closest(variableReference, symbolsInScope);
- diagnostics.push(suggestAlternative(node, variableReference, 'symbolic reference', alternative));
+ diagnostics.push(suggestAlternative(filteredNode, variableReference, 'symbolic reference', alternative));
}
return diagnostics;
}
@@ -174,12 +174,12 @@ function suggestAlternative(node: SyntaxNode, current: string, typeLabel: string
return {
actions: [
{
- apply(view: EditorView, from: number, to: number) {
+ apply(view: EditorView, applyFrom: number, applyTo: number) {
view.dispatch({
changes: {
- from,
+ from: applyFrom,
insert: alternative,
- to,
+ to: applyTo,
},
});
},
@@ -349,12 +349,12 @@ function validateArgument(
{
actions: [
{
- apply(view: EditorView, from: number, to: number) {
+ apply(view: EditorView, applyFrom: number, applyTo: number) {
view.dispatch({
changes: {
- from,
+ from: applyFrom,
insert: alternative,
- to,
+ to: applyTo,
},
});
},
@@ -390,12 +390,12 @@ function unquote(s: string): string {
*/
function validateParserErrors(tree: Tree, sequence: string, text: Text): Diagnostic[] {
const errorRegions: { from: number; to: number }[] = [];
- for (const node of filterNodes(tree.cursor(), node => node.name === TOKEN_ERROR)) {
+ for (const filteredNode of filterNodes(tree.cursor(), node => node.name === TOKEN_ERROR)) {
const currentRegion = errorRegions.at(-1);
- if (currentRegion?.to === node.from) {
- currentRegion.to = node.to;
+ if (currentRegion?.to === filteredNode.from) {
+ currentRegion.to = filteredNode.to;
} else {
- errorRegions.push({ from: node.from, to: node.to });
+ errorRegions.push({ from: filteredNode.from, to: filteredNode.to });
}
if (errorRegions.length > MAX_PARSER_ERRORS) {
diff --git a/src/utilities/effects.ts b/src/utilities/effects.ts
index 426e6ecea8..a87fa92ec4 100644
--- a/src/utilities/effects.ts
+++ b/src/utilities/effects.ts
@@ -11,37 +11,62 @@ import { DictionaryTypes } from '../enums/dictionaryTypes';
import { SchedulingType } from '../enums/scheduling';
import { SearchParameters } from '../enums/searchParameters';
import { Status } from '../enums/status';
-import { activityDirectivesDB, selectedActivityDirectiveId } from '../stores/activities';
import {
- rawCheckConstraintsStatus,
- rawConstraintResponses,
+ activityDirectivesDB as activityDirectivesDBStore,
+ selectedActivityDirectiveId as selectedActivityDirectiveIdStore,
+} from '../stores/activities';
+import {
+ rawCheckConstraintsStatus as rawCheckConstraintsStatusStore,
+ rawConstraintResponses as rawConstraintResponsesStore,
resetConstraintStoresForSimulation,
} from '../stores/constraints';
import { catchError, catchSchedulingError } from '../stores/errors';
import {
- createExpansionRuleError,
- creatingExpansionSequence,
- planExpansionStatus,
- savingExpansionRule,
- savingExpansionSet,
+ createExpansionRuleError as createExpansionRuleErrorStore,
+ creatingExpansionSequence as creatingExpansionSequenceStore,
+ planExpansionStatus as planExpansionStatusStore,
+ savingExpansionRule as savingExpansionRuleStore,
+ savingExpansionSet as savingExpansionSetStore,
} from '../stores/expansion';
-import { createExternalEventTypeError, creatingExternalEventType } from '../stores/external-event';
import {
- createDerivationGroupError,
- createExternalSourceError,
- createExternalSourceTypeError,
- creatingExternalSource,
- derivationGroupPlanLinkError,
- parsingError,
+ createExternalEventTypeError as createExternalEventTypeErrorStore,
+ creatingExternalEventType as creatingExternalEventTypeStore,
+} from '../stores/external-event';
+import {
+ createDerivationGroupError as createDerivationGroupErrorStore,
+ createExternalSourceError as createExternalSourceErrorStore,
+ createExternalSourceTypeError as createExternalSourceTypeErrorStore,
+ creatingExternalSource as creatingExternalSourceStore,
+ derivationGroupPlanLinkError as derivationGroupPlanLinkErrorStore,
+ parsingError as parsingErrorStore,
} from '../stores/external-source';
-import { createModelError, creatingModel, models } from '../stores/model';
-import { createPlanError, creatingPlan, planId } from '../stores/plan';
-import { schedulingRequests, selectedSpecId } from '../stores/scheduling';
-import { sequenceAdaptations } from '../stores/sequence-adaptation';
-import { channelDictionaries, commandDictionaries, parameterDictionaries } from '../stores/sequencing';
-import { selectedSpanId, simulationDataset, simulationDatasetId } from '../stores/simulation';
-import { createTagError } from '../stores/tags';
-import { applyViewUpdate, view, viewUpdateRow, viewUpdateTimeline } from '../stores/views';
+import {
+ createModelError as createModelErrorStore,
+ creatingModel as creatingModelStore,
+ models as modelsStore,
+} from '../stores/model';
+import {
+ createPlanError as createPlanErrorStore,
+ creatingPlan as creatingPlanStore,
+ planId as planIdStore,
+} from '../stores/plan';
+import {
+ schedulingRequests as schedulingRequestsStore,
+ selectedSpecId as selectedSpecIdStore,
+} from '../stores/scheduling';
+import { sequenceAdaptations as sequenceAdaptationsStore } from '../stores/sequence-adaptation';
+import {
+ channelDictionaries as channelDictionariesStore,
+ commandDictionaries as commandDictionariesStore,
+ parameterDictionaries as parameterDictionariesStore,
+} from '../stores/sequencing';
+import {
+ selectedSpanId as selectedSpanIdStore,
+ simulationDatasetId as simulationDatasetIdStore,
+ simulationDataset as simulationDatasetStore,
+} from '../stores/simulation';
+import { createTagError as createTagErrorStore } from '../stores/tags';
+import { applyViewUpdate, view as viewStore, viewUpdateRow, viewUpdateTimeline } from '../stores/views';
import type {
ActivityDirective,
ActivityDirectiveDB,
@@ -424,18 +449,12 @@ const effects = {
async checkConstraints(plan: Plan, user: User | null): Promise<void> {
try {
- rawCheckConstraintsStatus.set(Status.Incomplete);
+ rawCheckConstraintsStatusStore.set(Status.Incomplete);
if (plan !== null) {
const { id: planId } = plan;
- const data = await reqHasura(
- gql.CHECK_CONSTRAINTS,
- {
- planId,
- },
- user,
- );
+ const data = await reqHasura(gql.CHECK_CONSTRAINTS, { planId }, user);
if (data.constraintResponses) {
- rawConstraintResponses.set(data.constraintResponses);
+ rawConstraintResponsesStore.set(data.constraintResponses);
// find only the constraints compiled.
const successfulConstraintResults: ConstraintResult[] = data.constraintResponses
@@ -447,13 +466,13 @@ const effects = {
);
if (successfulConstraintResults.length === 0 && data.constraintResponses.length > 0) {
showFailureToast('All Constraints Failed');
- rawCheckConstraintsStatus.set(Status.Failed);
+ rawCheckConstraintsStatusStore.set(Status.Failed);
} else if (successfulConstraintResults.length !== data.constraintResponses.length) {
showFailureToast('Constraints Partially Checked');
- rawCheckConstraintsStatus.set(Status.Failed);
+ rawCheckConstraintsStatusStore.set(Status.Failed);
} else {
showSuccessToast('All Constraints Checked');
- rawCheckConstraintsStatus.set(Status.Complete);
+ rawCheckConstraintsStatusStore.set(Status.Complete);
}
if (failedConstraintResponses.length > 0) {
@@ -539,7 +558,7 @@ const effects = {
async createActivityDirective(
argumentsMap: ArgumentsMap,
- start_time_doy: string,
+ startTimeDoy: string,
type: string,
name: string,
metadata: ActivityMetadata,
@@ -552,7 +571,7 @@ const effects = {
}
if (plan !== null) {
- const start_offset = getIntervalFromDoyRange(plan.start_time_doy, start_time_doy);
+ const startOffset = getIntervalFromDoyRange(plan.start_time_doy, startTimeDoy);
const activityDirectiveInsertInput: ActivityDirectiveInsertInput = {
anchor_id: null,
anchored_to_start: true,
@@ -560,7 +579,7 @@ const effects = {
metadata,
name,
plan_id: plan.id,
- start_offset,
+ start_offset: startOffset,
type,
};
const data = await reqHasura(
@@ -574,7 +593,7 @@ const effects = {
if (newActivityDirective != null) {
const { id } = newActivityDirective;
- activityDirectivesDB.updateValue(directives => {
+ activityDirectivesDBStore.updateValue(directives => {
return (directives || []).map(directive => {
if (directive.id === id) {
return newActivityDirective;
@@ -582,8 +601,8 @@ const effects = {
return directive;
});
});
- selectedActivityDirectiveId.set(id);
- selectedSpanId.set(null);
+ selectedActivityDirectiveIdStore.set(id);
+ selectedSpanIdStore.set(null);
showSuccessToast('Activity Directive Created Successfully');
} else {
@@ -608,16 +627,16 @@ const effects = {
}
const data = await reqHasura<{ affected_rows: number }>(gql.CREATE_ACTIVITY_DIRECTIVE_TAGS, { tags }, user);
- const { insert_activity_directive_tags } = data;
- if (insert_activity_directive_tags != null) {
- const { affected_rows } = insert_activity_directive_tags;
+ const { insert_activity_directive_tags: insertActivityDirectiveTags } = data;
+ if (insertActivityDirectiveTags != null) {
+ const { affected_rows: affectedRows } = insertActivityDirectiveTags;
- if (affected_rows !== tags.length) {
+ if (affectedRows !== tags.length) {
throw Error('Some activity directive tags were not successfully created');
}
showSuccessToast('Activity Directive Updated Successfully');
- return affected_rows;
+ return affectedRows;
} else {
throw Error('Unable to create activity directive tags');
}
@@ -784,7 +803,7 @@ const effects = {
user: User | null,
): Promise {
try {
- createDerivationGroupError.set(null);
+ createDerivationGroupErrorStore.set(null);
const { createDerivationGroup: created } = await reqHasura(
gql.CREATE_DERIVATION_GROUP,
{ derivationGroup },
@@ -799,26 +818,26 @@ const effects = {
} catch (e) {
catchError('Derivation Group Create Failed', e as Error);
showFailureToast('Derivation Group Create Failed');
- createDerivationGroupError.set((e as Error).message);
+ createDerivationGroupErrorStore.set((e as Error).message);
return undefined;
}
},
async createExpansionRule(rule: ExpansionRuleInsertInput, user: User | null): Promise<number | null> {
try {
- createExpansionRuleError.set(null);
+ createExpansionRuleErrorStore.set(null);
if (!queryPermissions.CREATE_EXPANSION_RULE(user)) {
throwPermissionError('create an expansion rule');
}
- savingExpansionRule.set(true);
+ savingExpansionRuleStore.set(true);
const data = await reqHasura(gql.CREATE_EXPANSION_RULE, { rule }, user);
const { createExpansionRule } = data;
if (createExpansionRule != null) {
const { id } = createExpansionRule;
showSuccessToast('Expansion Rule Created Successfully');
- savingExpansionRule.set(false);
+ savingExpansionRuleStore.set(false);
return id;
} else {
throw Error(`Unable to create expansion rule "${rule.name}"`);
@@ -826,8 +845,8 @@ const effects = {
} catch (e) {
catchError('Expansion Rule Create Failed', e as Error);
showFailureToast('Expansion Rule Create Failed');
- savingExpansionRule.set(false);
- createExpansionRuleError.set((e as Error).message);
+ savingExpansionRuleStore.set(false);
+ createExpansionRuleErrorStore.set((e as Error).message);
return null;
}
},
@@ -839,15 +858,15 @@ const effects = {
}
const data = await reqHasura<{ affected_rows: number }>(gql.CREATE_EXPANSION_RULE_TAGS, { tags }, user);
- const { insert_expansion_rule_tags } = data;
- if (insert_expansion_rule_tags != null) {
- const { affected_rows } = insert_expansion_rule_tags;
+ const { insert_expansion_rule_tags: insertExpansionRuleTags } = data;
+ if (insertExpansionRuleTags != null) {
+ const { affected_rows: affectedRows } = insertExpansionRuleTags;
- if (affected_rows !== tags.length) {
+ if (affectedRows !== tags.length) {
throw Error('Some expansion rule tags were not successfully created');
}
- return affected_rows;
+ return affectedRows;
} else {
throw Error(`Unable to create expansion rule tags`);
}
@@ -864,7 +883,7 @@ const effects = {
throwPermissionError('create an expansion sequence');
}
- creatingExpansionSequence.set(true);
+ creatingExpansionSequenceStore.set(true);
const sequence: ExpansionSequenceInsertInput = {
metadata: {},
seq_id: seqId,
@@ -873,14 +892,14 @@ const effects = {
const data = await reqHasura(gql.CREATE_EXPANSION_SEQUENCE, { sequence }, user);
if (data.createExpansionSequence != null) {
showSuccessToast('Expansion Sequence Created Successfully');
- creatingExpansionSequence.set(false);
+ creatingExpansionSequenceStore.set(false);
} else {
throw Error(`Unable to create expansion sequence with ID: "${seqId}"`);
}
} catch (e) {
catchError('Expansion Sequence Create Failed', e as Error);
showFailureToast('Expansion Sequence Create Failed');
- creatingExpansionSequence.set(false);
+ creatingExpansionSequenceStore.set(false);
}
},
@@ -898,7 +917,7 @@ const effects = {
throwPermissionError('create an expansion set');
}
- savingExpansionSet.set(true);
+ savingExpansionSetStore.set(true);
const data = await reqHasura(
gql.CREATE_EXPANSION_SET,
{
@@ -914,7 +933,7 @@ const effects = {
if (createExpansionSet != null) {
const { id } = createExpansionSet;
showSuccessToast('Expansion Set Created Successfully');
- savingExpansionSet.set(false);
+ savingExpansionSetStore.set(false);
return id;
} else {
throw Error('Unable to create expansion set');
@@ -922,15 +941,15 @@ const effects = {
} catch (e) {
catchError('Expansion Set Create Failed', e as Error);
showFailureToast('Expansion Set Create Failed');
- savingExpansionSet.set(false);
+ savingExpansionSetStore.set(false);
return null;
}
},
async createExternalEventType(eventType: ExternalEventTypeInsertInput, user: User | null) {
try {
- creatingExternalEventType.set(true);
- createExternalEventTypeError.set(null);
+ creatingExternalEventTypeStore.set(true);
+ createExternalEventTypeErrorStore.set(null);
if (eventType) {
const { createExternalEventType: created } = await reqHasura(
gql.CREATE_EXTERNAL_EVENT_TYPE,
@@ -939,7 +958,7 @@ const effects = {
);
if (created) {
showSuccessToast('External Event Type Created Successfully');
- creatingExternalEventType.set(false);
+ creatingExternalEventTypeStore.set(false);
return created.name;
} else {
throw Error('Unable to create external event type');
@@ -950,8 +969,8 @@ const effects = {
} catch (e) {
catchError('External Event Type Create Failed', e as Error);
showFailureToast('External Event Type Create Failed');
- createExternalEventTypeError.set((e as Error).message);
- creatingExternalEventType.set(false);
+ createExternalEventTypeErrorStore.set((e as Error).message);
+ creatingExternalEventTypeStore.set(false);
}
},
@@ -969,8 +988,8 @@ const effects = {
if (!queryPermissions.CREATE_EXTERNAL_SOURCE(user)) {
throwPermissionError('upload an external source');
}
- creatingExternalSource.set(true);
- createExternalSourceError.set(null);
+ creatingExternalSourceStore.set(true);
+ createExternalSourceErrorStore.set(null);
// Create mutation inputs for Hasura
const externalSourceTypeInsert: ExternalSourceTypeInsertInput = {
@@ -990,16 +1009,16 @@ const effects = {
const validAtFormatted: string | undefined = convertDoyToYmd(validAt.replaceAll('Z', ''))?.replace('Z', '+00:00');
if (!startTimeFormatted || !endTimeFormatted || !validAtFormatted) {
showFailureToast('Parsing failed.');
- parsingError.set(`Parsing failed - parsing dates in input failed. ${startTime}, ${endTime}, ${validAt}`);
- creatingExternalSource.set(false);
+ parsingErrorStore.set(`Parsing failed - parsing dates in input failed. ${startTime}, ${endTime}, ${validAt}`);
+ creatingExternalSourceStore.set(false);
return;
}
// Check that the start and end times are logical
if (new Date(startTimeFormatted) > new Date(endTimeFormatted)) {
showFailureToast('Parsing failed.');
- parsingError.set(`Parsing failed - start time ${startTimeFormatted} after end time ${endTimeFormatted}.`);
- creatingExternalSource.set(false);
+ parsingErrorStore.set(`Parsing failed - start time ${startTimeFormatted} after end time ${endTimeFormatted}.`);
+ creatingExternalSourceStore.set(false);
return;
}
@@ -1030,7 +1049,7 @@ const effects = {
} catch (error) {
showFailureToast('Parsing failed.');
catchError(`Event duration has invalid format: ${externalEvent.key}\n`, error as Error);
- creatingExternalSource.set(false);
+ creatingExternalSourceStore.set(false);
return;
}
@@ -1041,10 +1060,10 @@ const effects = {
!(externalEventStart >= Date.parse(startTimeFormatted) && externalEventEnd <= Date.parse(endTimeFormatted))
) {
showFailureToast('Invalid External Event Time Bounds');
- parsingError.set(
+ parsingErrorStore.set(
`Upload failed. Event (${externalEvent.key}) not in bounds of source start and end: occurs from [${new Date(externalEventStart)},${new Date(externalEventEnd)}], not subset of [${new Date(startTimeFormatted)},${new Date(endTimeFormatted)}].\n`,
);
- creatingExternalSource.set(false);
+ creatingExternalSourceStore.set(false);
return;
}
@@ -1078,7 +1097,7 @@ const effects = {
);
if (createExternalSourceResponse !== undefined && createExternalSourceResponse !== null) {
showSuccessToast('External Source Created Successfully');
- creatingExternalSource.set(false);
+ creatingExternalSourceStore.set(false);
return createExternalSourceResponse as ExternalSourceSlim;
} else {
throw Error(`Unable to create external source`);
@@ -1087,11 +1106,11 @@ const effects = {
catchError('External Source Create Failed', e as Error);
showFailureToast('External Source Create Failed');
if ((e as Error).message.includes('external_source_type_matches_derivation_group')) {
- createExternalSourceError.set('Cannot duplicate derivation groups!');
+ createExternalSourceErrorStore.set('Cannot duplicate derivation groups!');
} else {
- createExternalSourceError.set((e as Error).message);
+ createExternalSourceErrorStore.set((e as Error).message);
}
- creatingExternalSource.set(false);
+ creatingExternalSourceStore.set(false);
}
},
@@ -1100,7 +1119,7 @@ const effects = {
user: User | null,
): Promise {
try {
- createExternalSourceTypeError.set(null);
+ createExternalSourceTypeErrorStore.set(null);
const { createExternalSourceType: created } = await reqHasura(
gql.CREATE_EXTERNAL_SOURCE_TYPE,
{ sourceType },
@@ -1115,7 +1134,7 @@ const effects = {
} catch (e) {
catchError('External Source Type Create Failed', e as Error);
showFailureToast('External Source Type Create Failed');
- createExternalSourceTypeError.set((e as Error).message);
+ createExternalSourceTypeErrorStore.set((e as Error).message);
return undefined;
}
},
@@ -1137,22 +1156,22 @@ const effects = {
description?: string,
): Promise<number | null> {
try {
- createModelError.set(null);
+ createModelErrorStore.set(null);
if (!queryPermissions.CREATE_MODEL(user)) {
throwPermissionError('upload a model');
}
- creatingModel.set(true);
+ creatingModelStore.set(true);
const file: File = files[0];
- const jar_id = await effects.uploadFile(file, user);
+ const jarId = await effects.uploadFile(file, user);
showSuccessToast('Model Uploaded Successfully. Processing model...');
- if (jar_id !== null) {
+ if (jarId !== null) {
const modelInsertInput: ModelInsertInput = {
description,
- jar_id,
+ jar_id: jarId,
mission: '',
name,
version,
@@ -1163,8 +1182,8 @@ const effects = {
const { id } = createModel;
showSuccessToast('Model Created Successfully');
- createModelError.set(null);
- creatingModel.set(false);
+ createModelErrorStore.set(null);
+ creatingModelStore.set(false);
return id;
} else {
@@ -1174,8 +1193,8 @@ const effects = {
} catch (e) {
catchError('Model Create Failed', e as Error);
showFailureToast('Model Create Failed');
- createModelError.set((e as Error).message);
- creatingModel.set(false);
+ createModelErrorStore.set((e as Error).message);
+ creatingModelStore.set(false);
}
return null;
@@ -1205,7 +1224,6 @@ const effects = {
},
async createParcelToParameterDictionaries(
- parcelOwner: UserId,
parcelToParameterDictionariesToAdd: Omit[],
user: User | null,
): Promise {
@@ -1213,21 +1231,20 @@ const effects = {
if (!queryPermissions.CREATE_PARCEL_TO_PARAMETER_DICTIONARIES(user)) {
throwPermissionError('create parcel to parameter dictionary');
}
-
const data = await reqHasura<{ returning: ParcelToParameterDictionary[] }>(
gql.CREATE_PARCEL_TO_PARAMETER_DICTIONARIES,
{ parcelToParameterDictionaries: parcelToParameterDictionariesToAdd },
user,
);
- const { insert_parcel_to_parameter_dictionary } = data;
+ const { insert_parcel_to_parameter_dictionary: insertParcelToParameterDictionary } = data;
- if (insert_parcel_to_parameter_dictionary) {
+ if (insertParcelToParameterDictionary) {
showSuccessToast('Parcel to parameter dictionaries created successfully');
} else {
throw Error('Unable to create parcel to parameter dictionaries');
}
- return insert_parcel_to_parameter_dictionary.returning;
+ return insertParcelToParameterDictionary.returning;
} catch (e) {
catchError('Create parcel to parameter dictionaries failed', e as Error);
showFailureToast('Create parcel to parameter dictionaries failed');
@@ -1237,27 +1254,27 @@ const effects = {
},
async createPlan(
- end_time_doy: string,
- model_id: number,
+ endTimeDoy: string,
+ modelId: number,
name: string,
- start_time_doy: string,
- simulation_template_id: number | null,
+ startTimeDoy: string,
+ simulationTemplateId: number | null,
user: User | null,
): Promise {
try {
- createPlanError.set(null);
+ createPlanErrorStore.set(null);
if (!queryPermissions.CREATE_PLAN(user)) {
throwPermissionError('create a plan');
}
- creatingPlan.set(true);
+ creatingPlanStore.set(true);
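+ // duration is computed as the interval between the plan's DOY (day-of-year, e.g. "2030-152T00:00:00") start and end times.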
const planInsertInput: PlanInsertInput = {
- duration: getIntervalFromDoyRange(start_time_doy, end_time_doy),
- model_id,
+ duration: getIntervalFromDoyRange(startTimeDoy, endTimeDoy),
+ model_id: modelId,
name,
- start_time: start_time_doy, // Postgres accepts DOY dates for it's 'timestamptz' type.
+ start_time: startTimeDoy, // Postgres accepts DOY dates for its 'timestamptz' type.
};
const data = await reqHasura(
gql.CREATE_PLAN,
@@ -1271,7 +1288,7 @@ const effects = {
const { collaborators, created_at, duration, id, owner, revision, start_time, updated_at, updated_by } =
createPlan;
- if (!(await effects.initialSimulationUpdate(id, simulation_template_id, start_time_doy, end_time_doy, user))) {
+ if (!(await effects.initialSimulationUpdate(id, simulationTemplateId, startTimeDoy, endTimeDoy, user))) {
throw Error('Failed to update simulation.');
}
@@ -1279,22 +1296,22 @@ const effects = {
collaborators,
created_at,
duration,
- end_time_doy,
+ end_time_doy: endTimeDoy,
id,
- model_id,
+ model_id: modelId,
name,
owner,
revision,
start_time,
- start_time_doy,
+ start_time_doy: startTimeDoy,
tags: [],
updated_at,
updated_by,
};
showSuccessToast('Plan Created Successfully');
- createPlanError.set(null);
- creatingPlan.set(false);
+ createPlanErrorStore.set(null);
+ creatingPlanStore.set(false);
return plan;
} else {
@@ -1303,8 +1320,8 @@ const effects = {
} catch (e) {
catchError('Plan Create Failed', e as Error);
showFailureToast('Plan Create Failed');
- createPlanError.set((e as Error).message);
- creatingPlan.set(false);
+ createPlanErrorStore.set((e as Error).message);
+ creatingPlanStore.set(false);
return null;
}
@@ -1319,11 +1336,11 @@ const effects = {
const { confirm, value = null } = await showCreatePlanBranchModal(plan);
if (confirm && value) {
- const { name, plan } = value;
- const data = await reqHasura(gql.DUPLICATE_PLAN, { new_plan_name: name, plan_id: plan.id }, user);
- const { duplicate_plan } = data;
- if (duplicate_plan != null) {
- goto(`${base}/plans/${duplicate_plan.new_plan_id}`);
+ const { name, plan: planToBranch } = value;
+ const data = await reqHasura(gql.DUPLICATE_PLAN, { new_plan_name: name, plan_id: planToBranch.id }, user);
+ const { duplicate_plan: duplicatePlan } = data;
+ if (duplicatePlan != null) {
+ goto(`${base}/plans/${duplicatePlan.new_plan_id}`);
showSuccessToast('Branch Created Successfully');
} else {
throw Error('');
@@ -1340,16 +1357,16 @@ const effects = {
const { confirm, value } = await showPlanBranchRequestModal(plan, action);
if (confirm && value) {
- const { source_plan, target_plan } = value;
+ const { source_plan: sourcePlan, target_plan: targetPlan } = value;
- if (!queryPermissions.CREATE_PLAN_MERGE_REQUEST(user, source_plan, target_plan, plan.model)) {
+ if (!queryPermissions.CREATE_PLAN_MERGE_REQUEST(user, sourcePlan, targetPlan, plan.model)) {
throwPermissionError('create a branch merge request');
}
if (action === 'merge') {
await effects.createPlanMergeRequest(
- { ...source_plan, model_id: plan.model_id },
- target_plan,
+ { ...sourcePlan, model_id: plan.model_id },
+ targetPlan,
plan.model,
user,
);
@@ -1367,16 +1384,16 @@ const effects = {
}
const data = await reqHasura(gql.CREATE_PLAN_COLLABORATORS, { collaborators }, user);
- const { insert_plan_collaborators } = data;
+ const { insert_plan_collaborators: insertPlanCollaborators } = data;
- if (insert_plan_collaborators != null) {
- const { affected_rows } = insert_plan_collaborators;
+ if (insertPlanCollaborators != null) {
+ const { affected_rows: affectedRows } = insertPlanCollaborators;
- if (affected_rows !== collaborators.length) {
+ if (affectedRows !== collaborators.length) {
throw Error('Some plan collaborators were not successfully added');
}
showSuccessToast('Plan Collaborators Updated');
- return affected_rows;
+ return affectedRows;
} else {
throw Error('Unable to create plan collaborators');
}
@@ -1406,11 +1423,11 @@ const effects = {
},
user,
);
- const { create_merge_request } = data;
- if (create_merge_request != null) {
- const { merge_request_id } = create_merge_request;
+ const { create_merge_request: createMergeRequest } = data;
+ if (createMergeRequest != null) {
+ const { merge_request_id: mergeRequestId } = createMergeRequest;
showSuccessToast('Merge Request Created Successfully');
- return merge_request_id;
+ return mergeRequestId;
} else {
throw Error('Unable to create a branch merge request');
}
@@ -1430,8 +1447,8 @@ const effects = {
const { confirm, value = null } = await showCreatePlanSnapshotModal(plan, user);
if (confirm && value) {
- const { description, name, plan, tags } = value;
- await effects.createPlanSnapshotHelper(plan.id, name, description, tags, user);
+ const { description, name, plan: planToSnapshot, tags } = value;
+ await effects.createPlanSnapshotHelper(planToSnapshot.id, name, description, tags, user);
showSuccessToast('Snapshot Created Successfully');
}
} catch (e) {
@@ -1443,14 +1460,14 @@ const effects = {
/**
* Helper that creates a plan snapshot and associates the given tags with it in a single call.
*
- * @param planId
+ * @param planIdToSnapshot
* @param name
* @param description
* @param tags
* @param user
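+ * @example
+ * // Hypothetical call; the plan ID, names, and tags here are illustrative only:
+ * await effects.createPlanSnapshotHelper(42, 'Pre-merge snapshot', 'State before merging the branch', tags, user);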
*/
async createPlanSnapshotHelper(
- planId: number,
+ planIdToSnapshot: number,
name: string,
description: string,
tags: Tag[],
@@ -1458,7 +1475,7 @@ const effects = {
): Promise<void> {
const data = await reqHasura<{ snapshot_id: number }>(
gql.CREATE_PLAN_SNAPSHOT,
- { description, plan_id: planId, snapshot_name: name },
+ { description, plan_id: planIdToSnapshot, snapshot_name: name },
user,
);
const { createSnapshot } = data;
@@ -1466,9 +1483,9 @@ const effects = {
const { snapshot_id } = createSnapshot;
// Associate tags with the snapshot
const newPlanSnapshotTags: PlanSnapshotTagsInsertInput[] =
- tags?.map(({ id: tag_id }) => ({
+ tags?.map(({ id: tagId }) => ({
snapshot_id,
- tag_id,
+ tag_id: tagId,
})) ?? [];
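+ // notify is false here so the tag insert does not raise a second success toast on top of the snapshot's.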
await effects.createPlanSnapshotTags(newPlanSnapshotTags, user, false);
}
@@ -1485,17 +1502,17 @@ const effects = {
}
const data = await reqHasura<{ affected_rows: number }>(gql.CREATE_PLAN_SNAPSHOT_TAGS, { tags }, user);
- const { insert_plan_snapshot_tags } = data;
- if (insert_plan_snapshot_tags != null) {
- const { affected_rows } = insert_plan_snapshot_tags;
+ const { insert_plan_snapshot_tags: insertPlanSnapshotTags } = data;
+ if (insertPlanSnapshotTags != null) {
+ const { affected_rows: affectedRows } = insertPlanSnapshotTags;
- if (affected_rows !== tags.length) {
+ if (affectedRows !== tags.length) {
throw Error('Some plan snapshot tags were not successfully created');
}
if (notify) {
showSuccessToast('Plan Snapshot Updated Successfully');
}
- return affected_rows;
+ return affectedRows;
} else {
throw Error('Unable to create plan snapshot tags');
}
@@ -1518,17 +1535,17 @@ const effects = {
}
const data = await reqHasura<{ affected_rows: number }>(gql.CREATE_PLAN_TAGS, { tags }, user);
- const { insert_plan_tags } = data;
- if (insert_plan_tags != null) {
- const { affected_rows } = insert_plan_tags;
+ const { insert_plan_tags: insertPlanTags } = data;
+ if (insertPlanTags != null) {
+ const { affected_rows: affectedRows } = insertPlanTags;
- if (affected_rows !== tags.length) {
+ if (affectedRows !== tags.length) {
throw Error('Some plan tags were not successfully created');
}
if (notify) {
showSuccessToast('Plan Updated Successfully');
}
- return affected_rows;
+ return affectedRows;
} else {
throw Error('Unable to create plan tags');
}
@@ -1765,7 +1782,7 @@ const effects = {
},
async createSchedulingGoalPlanSpecification(
- spec_goal: SchedulingGoalPlanSpecInsertInput,
+ specGoal: SchedulingGoalPlanSpecInsertInput,
user: User | null,
): Promise<number | null> {
try {
@@ -1775,14 +1792,14 @@ const effects = {
const data = await reqHasura(
gql.CREATE_SCHEDULING_GOAL_PLAN_SPECIFICATION,
- { spec_goal },
+ { spec_goal: specGoal },
user,
);
const { createSchedulingSpecGoal } = data;
if (createSchedulingSpecGoal != null) {
- const { specification_id } = createSchedulingSpecGoal;
+ const { specification_id: specificationId } = createSchedulingSpecGoal;
showSuccessToast('New Scheduling Goal Invocation Created Successfully');
- return specification_id;
+ return specificationId;
} else {
throw Error('Unable to create a scheduling spec goal invocation');
}
@@ -1852,25 +1869,25 @@ const effects = {
async createTag(tag: TagsInsertInput, user: User | null, notify: boolean = true): Promise<Tag | null> {
try {
- createTagError.set(null);
+ createTagErrorStore.set(null);
if (!queryPermissions.CREATE_TAGS(user)) {
throwPermissionError('create tags');
}
const data = await reqHasura<{ affected_row: number; tag: Tag }>(gql.CREATE_TAG, { tag }, user);
- const { insert_tags_one } = data;
- if (insert_tags_one != null) {
- const { tag: insertedTag } = insert_tags_one;
+ const { insert_tags_one: insertTagsOne } = data;
+ if (insertTagsOne != null) {
+ const { tag: insertedTag } = insertTagsOne;
if (notify) {
showSuccessToast('Tag Created Successfully');
}
- createTagError.set(null);
+ createTagErrorStore.set(null);
return insertedTag;
} else {
throw Error(`Unable to create tag "${tag.name}"`);
}
} catch (e) {
- createTagError.set((e as Error).message);
+ createTagErrorStore.set((e as Error).message);
catchError('Create Tags Failed', e as Error);
showFailureToast('Create Tags Failed');
return null;
@@ -1884,9 +1901,9 @@ const effects = {
}
const data = await reqHasura<{ affected_rows: number; returning: Tag[] }>(gql.CREATE_TAGS, { tags }, user);
- const { insert_tags } = data;
- if (insert_tags != null) {
- const { returning } = insert_tags;
+ const { insert_tags: insertTags } = data;
+ if (insertTags != null) {
+ const { returning } = insertTags;
const createdTags = returning.map(({ name }) => name);
@@ -1946,7 +1963,7 @@ const effects = {
const { newView } = data;
if (newView != null) {
- view.update(() => newView);
+ viewStore.update(() => newView);
setQueryParam(SearchParameters.VIEW_ID, `${newView.id}`);
showSuccessToast('View Created Successfully');
return true;
@@ -2100,12 +2117,12 @@ const effects = {
if (response.delete_activity_by_pk_reanchor_to_anchor_bulk != null) {
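+ // The bulk response reports every affected row; keep only the IDs whose change_type is 'deleted' (the rest were re-anchored).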
const deletedActivityIds = response.delete_activity_by_pk_reanchor_to_anchor_bulk
- .filter(({ change_type }) => {
- return change_type === 'deleted';
+ .filter(({ change_type: changeType }) => {
+ return changeType === 'deleted';
})
.map(({ affected_row: { id } }) => id);
- activityDirectivesDB.updateValue(directives => {
+ activityDirectivesDBStore.updateValue(directives => {
return (directives || []).filter(directive => {
return deletedActivityIds.indexOf(directive.id) < 0;
});
@@ -2140,12 +2157,12 @@ const effects = {
if (response.delete_activity_by_pk_reanchor_plan_start_bulk != null) {
const deletedActivityIds = response.delete_activity_by_pk_reanchor_plan_start_bulk
- .filter(({ change_type }) => {
- return change_type === 'deleted';
+ .filter(({ change_type: changeType }) => {
+ return changeType === 'deleted';
})
.map(({ affected_row: { id } }) => id);
- activityDirectivesDB.updateValue(directives => {
+ activityDirectivesDBStore.updateValue(directives => {
return (directives || []).filter(directive => {
return deletedActivityIds.indexOf(directive.id) < 0;
});
@@ -2178,12 +2195,12 @@ const effects = {
if (response.delete_activity_by_pk_delete_subtree_bulk) {
const deletedActivityIds = response.delete_activity_by_pk_delete_subtree_bulk
- .filter(({ change_type }) => {
- return change_type === 'deleted';
+ .filter(({ change_type: changeType }) => {
+ return changeType === 'deleted';
})
.map(({ affected_row: { id } }) => id);
- activityDirectivesDB.updateValue(directives => {
+ activityDirectivesDBStore.updateValue(directives => {
return (directives || []).filter(directive => {
return deletedActivityIds.indexOf(directive.id) < 0;
});
@@ -2210,7 +2227,7 @@ const effects = {
if (response.deleteActivityDirectives) {
const deletedActivityIds = response.deleteActivityDirectives.returning.map(({ id }) => id);
- activityDirectivesDB.updateValue(directives => {
+ activityDirectivesDBStore.updateValue(directives => {
return (directives || []).filter(directive => {
return deletedActivityIds.indexOf(directive.id) < 0;
});
@@ -2281,7 +2298,7 @@ const effects = {
const data = await reqHasura<{ id: number }>(gql.DELETE_CHANNEL_DICTIONARY, { id }, user);
if (data.deleteChannelDictionary != null) {
showSuccessToast('Channel Dictionary Deleted Successfully');
- channelDictionaries.filterValueById(id);
+ channelDictionariesStore.filterValueById(id);
} else {
throw Error(`Unable to delete channel dictionary with ID: "${id}"`);
}
@@ -2308,7 +2325,7 @@ const effects = {
const data = await reqHasura<{ id: number }>(gql.DELETE_COMMAND_DICTIONARY, { id }, user);
if (data.deleteCommandDictionary != null) {
showSuccessToast('Command Dictionary Deleted Successfully');
- commandDictionaries.filterValueById(id);
+ commandDictionariesStore.filterValueById(id);
} else {
throw Error(`Unable to delete command dictionary with ID: "${id}"`);
}
@@ -2398,18 +2415,14 @@ const effects = {
}
},
- async deleteDerivationGroupForPlan(
- derivation_group_name: string,
- plan: Plan | null,
- user: User | null,
- ): Promise<void> {
+ async deleteDerivationGroupForPlan(derivationGroupName: string, plan: Plan | null, user: User | null): Promise<void> {
try {
if ((plan && !queryPermissions.DELETE_PLAN_DERIVATION_GROUP(user, plan)) || !plan) {
throwPermissionError('delete a derivation group from the plan');
}
// (reuse the store from above: linking and unlinking are driven from the same panel, so they surface errors through the same store)
- derivationGroupPlanLinkError.set(null);
+ derivationGroupPlanLinkErrorStore.set(null);
if (plan !== null) {
const data = await reqHasura<{
returning: {
@@ -2421,7 +2434,7 @@ const effects = {
{
where: {
_and: {
- derivation_group_name: { _eq: derivation_group_name },
+ derivation_group_name: { _eq: derivationGroupName },
plan_id: { _eq: plan.id },
},
},
@@ -2439,7 +2452,7 @@ const effects = {
} catch (e) {
catchError('Derivation Group De-linking Failed', e as Error);
showFailureToast('Derivation Group De-linking Failed');
- derivationGroupPlanLinkError.set((e as Error).message);
+ derivationGroupPlanLinkErrorStore.set((e as Error).message);
}
},
@@ -2484,10 +2497,10 @@ const effects = {
{ rule_id: ruleId, tag_ids: tagIds },
user,
);
- const { delete_expansion_rule_tags } = data;
- if (delete_expansion_rule_tags != null) {
- const { affected_rows } = delete_expansion_rule_tags;
- return affected_rows;
+ const { delete_expansion_rule_tags: deleteExpansionRuleTags } = data;
+ if (deleteExpansionRuleTags != null) {
+ const { affected_rows: affectedRows } = deleteExpansionRuleTags;
+ return affectedRows;
} else {
throw Error('Unable to delete expansion rule tags');
}
@@ -2526,8 +2539,8 @@ const effects = {
},
async deleteExpansionSequenceToActivity(
- simulation_dataset_id: number,
- simulated_activity_id: number,
+ simulationDatasetId: number,
+ simulatedActivityId: number,
user: User | null,
): Promise<boolean> {
try {
@@ -2538,8 +2551,8 @@ const effects = {
const data = await reqHasura(
gql.DELETE_EXPANSION_SEQUENCE_TO_ACTIVITY,
{
- simulated_activity_id,
- simulation_dataset_id,
+ simulated_activity_id: simulatedActivityId,
+ simulation_dataset_id: simulationDatasetId,
},
user,
);
@@ -2548,7 +2561,7 @@ const effects = {
return true;
} else {
throw Error(
- `Unable to remove the associated expansion sequence from the dataset ${simulation_dataset_id} and the activity ${simulated_activity_id}`,
+ `Unable to remove the associated expansion sequence from the dataset ${simulationDatasetId} and the activity ${simulatedActivityId}`,
);
}
} catch (e) {
@@ -2588,15 +2601,15 @@ const effects = {
}
},
- async deleteExternalEventType(event_type_name: string | null, user: User | null): Promise<void> {
+ async deleteExternalEventType(eventTypeName: string | null, user: User | null): Promise<void> {
try {
if (!queryPermissions.DELETE_EXTERNAL_EVENT_TYPE(user)) {
throwPermissionError('delete an external event type');
}
// Deleting an external event type requires first deleting all derivation groups associated with it.
- if (event_type_name !== null) {
- const data = await reqHasura<{ id: number }>(gql.DELETE_EXTERNAL_EVENT_TYPE, { name: event_type_name }, user);
+ if (eventTypeName !== null) {
+ const data = await reqHasura<{ id: number }>(gql.DELETE_EXTERNAL_EVENT_TYPE, { name: eventTypeName }, user);
if (data.deleteDerivationGroup === null) {
throw Error('Unable to delete external event type');
}
@@ -2660,7 +2673,7 @@ const effects = {
const data = await reqHasura<{ derivationGroupName: string; sourceKeys: string[] }>(
gql.DELETE_EXTERNAL_SOURCES,
{
- derivationGroupName: derivationGroupName,
+ derivationGroupName,
sourceKeys: derivationGroups[derivationGroupName],
},
user,
@@ -2734,7 +2747,7 @@ const effects = {
const data = await reqHasura<{ id: number }>(gql.DELETE_MODEL, { id }, user);
if (data.deleteModel != null) {
showSuccessToast('Model Deleted Successfully');
- models.filterValueById(id);
+ modelsStore.filterValueById(id);
} else {
throw Error(`Unable to delete model "${model.name}"`);
}
@@ -2761,7 +2774,7 @@ const effects = {
const data = await reqHasura<{ id: number }>(gql.DELETE_PARAMETER_DICTIONARY, { id }, user);
if (data.deleteParameterDictionary != null) {
showSuccessToast('Parameter Dictionary Deleted Successfully');
- parameterDictionaries.filterValueById(id);
+ parameterDictionariesStore.filterValueById(id);
} else {
throw Error(`Unable to delete parameter dictionary with ID: "${id}"`);
}
@@ -2821,17 +2834,17 @@ const effects = {
user,
);
- const { delete_parcel_to_parameter_dictionary } = data;
+ const { delete_parcel_to_parameter_dictionary: deleteParcelToParameterDictionary } = data;
- if (delete_parcel_to_parameter_dictionary != null) {
- const { affected_rows } = delete_parcel_to_parameter_dictionary;
+ if (deleteParcelToParameterDictionary != null) {
+ const { affected_rows: affectedRows } = deleteParcelToParameterDictionary;
- if (affected_rows !== parameterDictionaryIds.length) {
+ if (affectedRows !== parameterDictionaryIds.length) {
throw Error('Some parcel to dictionary associations were not successfully deleted');
}
showSuccessToast('Parcel to dictionary association deleted Successfully');
- return affected_rows;
+ return affectedRows;
} else {
throw Error('Unable to delete parcel to dictionary associations');
}
@@ -3056,7 +3069,7 @@ const effects = {
}
showSuccessToast('Sequence Adaptation Deleted Successfully');
- sequenceAdaptations.filterValueById(id);
+ sequenceAdaptationsStore.filterValueById(id);
}
} catch (e) {
catchError('Sequence Adaptation Delete Failed', e as Error);
@@ -3390,7 +3403,7 @@ const effects = {
user: User | null,
): Promise<void> {
try {
- planExpansionStatus.set(Status.Incomplete);
+ planExpansionStatusStore.set(Status.Incomplete);
if (!queryPermissions.EXPAND(user, plan, model)) {
throwPermissionError('expand this plan');
@@ -3398,14 +3411,14 @@ const effects = {
const data = await reqHasura<{ id: number }>(gql.EXPAND, { expansionSetId, simulationDatasetId }, user);
if (data.expand != null) {
- planExpansionStatus.set(Status.Complete);
+ planExpansionStatusStore.set(Status.Complete);
showSuccessToast('Plan Expanded Successfully');
} else {
throw Error('Unable to expand plan');
}
} catch (e) {
catchError('Plan Expansion Failed', e as Error);
- planExpansionStatus.set(Status.Failed);
+ planExpansionStatusStore.set(Status.Failed);
showFailureToast('Plan Expansion Failed');
}
},
@@ -3415,9 +3428,9 @@ const effects = {
const query = convertToQuery(gql.SUB_ACTIVITY_DIRECTIVES);
const data = await reqHasura(query, { planId }, user);
- const { activity_directives } = data;
- if (activity_directives != null) {
- return activity_directives;
+ const { activity_directives: activityDirectives } = data;
+ if (activityDirectives != null) {
+ return activityDirectives;
} else {
throw Error('Unable to retrieve activities for plan');
}
@@ -3501,9 +3514,9 @@ const effects = {
{ modelId },
user,
);
- const { activity_types } = data;
- if (activity_types != null) {
- return activity_types;
+ const { activity_types: activityTypes } = data;
+ if (activityTypes != null) {
+ return activityTypes;
} else {
throw Error('Unable to retrieve activity types');
}
@@ -3645,24 +3658,24 @@ const effects = {
},
async getExpansionSequenceId(
- simulated_activity_id: number,
- simulation_dataset_id: number,
+ simulatedActivityId: number,
+ simulationDatasetId: number,
user: User | null,
): Promise<string | null> {
try {
const data = await reqHasura(
gql.GET_EXPANSION_SEQUENCE_ID,
{
- simulated_activity_id,
- simulation_dataset_id,
+ simulated_activity_id: simulatedActivityId,
+ simulation_dataset_id: simulationDatasetId,
},
user,
);
const { expansionSequence } = data;
if (expansionSequence) {
- const { seq_id } = expansionSequence;
- return seq_id;
+ const { seq_id: seqId } = expansionSequence;
+ return seqId;
} else {
return null;
}
@@ -3706,7 +3719,7 @@ const effects = {
}
},
- async getExternalEventTypes(plan_id: number, user: User | null): Promise<ExternalEventType[]> {
+ async getExternalEventTypes(planId: number, user: User | null): Promise<ExternalEventType[]> {
try {
const sourceData = await reqHasura<
{
@@ -3720,7 +3733,7 @@ const effects = {
}[];
};
}[]
- >(gql.GET_PLAN_EVENT_TYPES, { plan_id }, user);
+ >(gql.GET_PLAN_EVENT_TYPES, { plan_id: planId }, user);
const types: ExternalEventType[] = [];
if (sourceData?.plan_derivation_group !== null) {
for (const group of sourceData.plan_derivation_group) {
@@ -3766,13 +3779,13 @@ const effects = {
{ derivationGroupName: externalSourceDerivationGroup, sourceKey: externalSourceKey },
user,
);
- const { external_source } = data;
- if (external_source != null) {
- const event_types: string[] = [];
- for (const external_event of external_source[0].external_events) {
- event_types.push(external_event.external_event_type.name);
+ const { external_source: externalSource } = data;
+ if (externalSource != null) {
+ const eventTypes: string[] = [];
+ for (const externalEvent of externalSource[0].external_events) {
+ eventTypes.push(externalEvent.external_event_type.name);
}
- return Array.from(new Set(event_types));
+ return Array.from(new Set(eventTypes));
} else {
throw Error('Unable to retrieve external event types for source');
}
@@ -3832,9 +3845,7 @@ const effects = {
if (!queryPermissions.GET_UPLOADED_FILENAME(user)) {
throwPermissionError('get the requested filename');
}
- const data = (await reqHasura<[{ name: string }]>(gql.GET_UPLOADED_FILENAME, { id: fileId }, user))[
- 'uploaded_file'
- ];
+ const data = (await reqHasura<[{ name: string }]>(gql.GET_UPLOADED_FILENAME, { id: fileId }, user)).uploaded_file;
if (data) {
const { name } = data[0];
@@ -3904,14 +3915,14 @@ const effects = {
{ channelDictionaryId },
user,
);
- const { channel_dictionary } = data;
+ const { channel_dictionary: channelDictionary } = data;
- if (!Array.isArray(channel_dictionary) || !channel_dictionary.length) {
+ if (!Array.isArray(channelDictionary) || !channelDictionary.length) {
catchError(`Unable to find channel dictionary with id ${channelDictionaryId}`);
return null;
} else {
- const [{ parsed_json }] = channel_dictionary;
- return parsed_json;
+ const [{ parsed_json: parsedJson }] = channelDictionary;
+ return parsedJson;
}
} catch (e) {
catchError(e as Error);
@@ -3933,14 +3944,14 @@ const effects = {
{ commandDictionaryId },
user,
);
- const { command_dictionary } = data;
+ const { command_dictionary: commandDictionary } = data;
- if (!Array.isArray(command_dictionary) || !command_dictionary.length) {
+ if (!Array.isArray(commandDictionary) || !commandDictionary.length) {
catchError(`Unable to find command dictionary with id ${commandDictionaryId}`);
return null;
} else {
- const [{ parsed_json }] = command_dictionary;
- return parsed_json;
+ const [{ parsed_json: parsedJson }] = commandDictionary;
+ return parsedJson;
}
} catch (e) {
catchError(e as Error);
@@ -3962,14 +3973,14 @@ const effects = {
{ parameterDictionaryId },
user,
);
- const { parameter_dictionary } = data;
+ const { parameter_dictionary: parameterDictionary } = data;
- if (!Array.isArray(parameter_dictionary) || !parameter_dictionary.length) {
+ if (!Array.isArray(parameterDictionary) || !parameterDictionary.length) {
catchError(`Unable to find parameter dictionary with id ${parameterDictionaryId}`);
return null;
} else {
- const [{ parsed_json }] = parameter_dictionary;
- return parsed_json;
+ const [{ parsed_json: parsedJson }] = parameterDictionary;
+ return parsedJson;
}
} catch (e) {
catchError(e as Error);
@@ -4013,12 +4024,12 @@ const effects = {
},
async getPlanMergeConflictingActivities(
- merge_request_id: number,
+ mergeRequestId: number,
user: User | null,
): Promise {
try {
const query = convertToQuery(gql.SUB_PLAN_MERGE_CONFLICTING_ACTIVITIES);
- const data = await reqHasura(query, { merge_request_id }, user);
+ const data = await reqHasura(query, { merge_request_id: mergeRequestId }, user);
const { conflictingActivities } = data;
if (conflictingActivities != null) {
return conflictingActivities;
@@ -4032,14 +4043,14 @@ const effects = {
},
async getPlanMergeNonConflictingActivities(
- merge_request_id: number,
+ mergeRequestId: number,
user: User | null,
): Promise {
try {
const data = await reqHasura(
gql.GET_PLAN_MERGE_NON_CONFLICTING_ACTIVITIES,
{
- merge_request_id,
+ merge_request_id: mergeRequestId,
},
user,
);
@@ -4059,10 +4070,10 @@ const effects = {
try {
const query = convertToQuery(gql.SUB_PLAN_MERGE_REQUEST_IN_PROGRESS);
const data = await reqHasura(query, { planId }, user);
- const { merge_requests } = data;
- if (merge_requests != null) {
- const [merge_request] = merge_requests; // Query uses 'limit: 1' so merge_requests.length === 1.
- return merge_request;
+ const { merge_requests: mergeRequests } = data;
+ if (mergeRequests != null) {
+ const [mergeRequest] = mergeRequests; // Query uses 'limit: 1', so mergeRequests holds at most one element.
+ return mergeRequest;
} else {
throw Error('Unable to get merge requests in progress');
}
@@ -4102,7 +4113,7 @@ const effects = {
const { plan_snapshot_activity_directives: planSnapshotActivityDirectives } = data;
if (planSnapshotActivityDirectives) {
- return planSnapshotActivityDirectives.map(({ snapshot_id: _snapshot_id, ...planSnapshotActivityDirective }) => {
+ return planSnapshotActivityDirectives.map(({ snapshot_id: _snapshotId, ...planSnapshotActivityDirective }) => {
return {
plan_id: snapshot.plan_id,
...planSnapshotActivityDirective,
@@ -4164,12 +4175,12 @@ const effects = {
return reqHasura(gql.GET_PROFILE, { datasetId, name }, user, signal);
},
- async getResourceTypes(model_id: number, user: User | null, limit: number | null = null): Promise {
+ async getResourceTypes(modelId: number, user: User | null, limit: number | null = null): Promise {
try {
- const data = await reqHasura(gql.GET_RESOURCE_TYPES, { limit, model_id }, user);
- const { resource_types } = data;
- if (resource_types != null) {
- return resource_types;
+ const data = await reqHasura(gql.GET_RESOURCE_TYPES, { limit, model_id: modelId }, user);
+ const { resource_types: resourceTypes } = data;
+ if (resourceTypes != null) {
+ return resourceTypes;
} else {
throw Error('Unable to retrieve resource types');
}
@@ -4204,16 +4215,16 @@ const effects = {
user,
signal,
);
- const { plan_dataset: plan_datasets } = data;
- if (plan_datasets != null) {
+ const { plan_dataset: planDatasets } = data;
+ if (planDatasets != null) {
let resources: Resource[] = [];
const profileMap: Set = new Set();
- plan_datasets.sort(({ dataset_id: datasetIdA }, { dataset_id: datasetIdB }) => {
+ planDatasets.sort(({ dataset_id: datasetIdA }, { dataset_id: datasetIdB }) => {
return compare(datasetIdA, datasetIdB, false);
});
- for (const dataset of plan_datasets) {
+ for (const dataset of planDatasets) {
const {
dataset: { profiles },
offset_from_plan_start,
@@ -4316,20 +4327,20 @@ const effects = {
},
async getSchedulingSpecConditionsForCondition(
- condition_id: number | null,
+ conditionId: number | null,
user: User | null,
): Promise {
- if (condition_id !== null) {
+ if (conditionId !== null) {
try {
const data = await reqHasura(
gql.GET_SCHEDULING_SPEC_CONDITIONS_FOR_CONDITION,
{
- condition_id,
+ condition_id: conditionId,
},
user,
);
- const { scheduling_specification_conditions } = data;
- return scheduling_specification_conditions;
+ const { scheduling_specification_conditions: schedulingSpecificationConditions } = data;
+ return schedulingSpecificationConditions;
} catch (e) {
catchError(e as Error);
return null;
@@ -4340,18 +4351,18 @@ const effects = {
},
async getSchedulingSpecGoalsForGoal(
- goal_id: number | null,
+ goalId: number | null,
user: User | null,
): Promise {
- if (goal_id !== null) {
+ if (goalId !== null) {
try {
const data = await reqHasura(
gql.GET_SCHEDULING_SPEC_GOALS_FOR_GOAL,
- { goal_id },
+ { goal_id: goalId },
user,
);
- const { scheduling_specification_goals } = data;
- return scheduling_specification_goals;
+ const { scheduling_specification_goals: schedulingSpecificationGoals } = data;
+ return schedulingSpecificationGoals;
} catch (e) {
catchError(e as Error);
return null;
@@ -4361,17 +4372,17 @@ const effects = {
}
},
- async getSequenceAdaptation(sequence_adaptation_id: number, user: User | null): Promise<SequenceAdaptation | undefined> {
+ async getSequenceAdaptation(sequenceAdaptationId: number, user: User | null): Promise<SequenceAdaptation | undefined> {
try {
const data = await reqHasura<[sequence_adaptation: SequenceAdaptation]>(
gql.GET_SEQUENCE_ADAPTATION,
- { sequence_adaptation_id },
+ { sequence_adaptation_id: sequenceAdaptationId },
user,
);
- const { sequence_adaptation } = data;
+ const { sequence_adaptation: sequenceAdaptation } = data;
- if (sequence_adaptation && sequence_adaptation.length > 0) {
- return sequence_adaptation[0];
+ if (sequenceAdaptation && sequenceAdaptation.length > 0) {
+ return sequenceAdaptation[0];
}
} catch (e) {
catchError(e as Error);
@@ -4495,10 +4506,14 @@ const effects = {
}
},
- async getTsFilesConstraints(model_id: number, user: User | null): Promise {
- if (model_id !== null && model_id !== undefined) {
+ async getTsFilesConstraints(modelId: number, user: User | null): Promise {
+ if (modelId !== null && modelId !== undefined) {
try {
- const data = await reqHasura(gql.GET_TYPESCRIPT_CONSTRAINTS, { model_id }, user);
+ const data = await reqHasura(
+ gql.GET_TYPESCRIPT_CONSTRAINTS,
+ { model_id: modelId },
+ user,
+ );
const { dslTypeScriptResponse } = data;
if (dslTypeScriptResponse != null) {
const { reason, status, typescriptFiles } = dslTypeScriptResponse;
@@ -4521,10 +4536,10 @@ const effects = {
}
},
- async getTsFilesScheduling(model_id: number | null | undefined, user: User | null): Promise {
- if (model_id !== null && model_id !== undefined) {
+ async getTsFilesScheduling(modelId: number | null | undefined, user: User | null): Promise {
+ if (modelId !== null && modelId !== undefined) {
try {
- const data = await reqHasura(gql.GET_TYPESCRIPT_SCHEDULING, { model_id }, user);
+ const data = await reqHasura(gql.GET_TYPESCRIPT_SCHEDULING, { model_id: modelId }, user);
const { dslTypeScriptResponse } = data;
if (dslTypeScriptResponse != null) {
const { reason, status, typescriptFiles } = dslTypeScriptResponse;
@@ -4728,7 +4743,7 @@ const effects = {
throwPermissionError('import a plan');
}
- creatingPlan.set(true);
+ creatingPlanStore.set(true);
const file: File = files[0];
@@ -4747,7 +4762,7 @@ const effects = {
const createdPlan = await reqGateway('/importPlan', 'POST', body, user, true);
- creatingPlan.set(false);
+ creatingPlanStore.set(false);
if (createdPlan != null) {
return createdPlan;
}
@@ -4755,16 +4770,16 @@ const effects = {
return null;
} catch (e) {
catchError(e as Error);
- creatingPlan.set(false);
+ creatingPlanStore.set(false);
return null;
}
},
async initialSimulationUpdate(
- plan_id: number,
- simulation_template_id: number | null = null,
- simulation_start_time: string | null = null,
- simulation_end_time: string | null = null,
+ planId: number,
+ simulationTemplateId: number | null = null,
+ simulationStartTime: string | null = null,
+ simulationEndTime: string | null = null,
user: User | null,
): Promise<boolean> {
try {
@@ -4774,13 +4789,13 @@ const effects = {
const simulationInput: SimulationInitialUpdateInput = {
arguments: {} as ArgumentsMap,
- simulation_end_time,
- simulation_start_time,
- simulation_template_id,
+ simulation_end_time: simulationEndTime,
+ simulation_start_time: simulationStartTime,
+ simulation_template_id: simulationTemplateId,
};
const data = await reqHasura<{ returning: { id: number }[] }>(
gql.INITIAL_SIMULATION_UPDATE,
- { plan_id: plan_id, simulation: simulationInput },
+ { plan_id: planId, simulation: simulationInput },
user,
);
if (data.update_simulation != null) {
@@ -4800,7 +4815,7 @@ const effects = {
throwPermissionError('add a derivation group to the plan');
}
- derivationGroupPlanLinkError.set(null);
+ derivationGroupPlanLinkErrorStore.set(null);
if (plan !== null) {
const data = await reqHasura(
gql.CREATE_PLAN_DERIVATION_GROUP,
@@ -4823,14 +4838,14 @@ const effects = {
} catch (e) {
catchError('Derivation Group Linking Failed', e as Error);
showFailureToast('Derivation Group Linking Failed');
- derivationGroupPlanLinkError.set((e as Error).message);
+ derivationGroupPlanLinkErrorStore.set((e as Error).message);
}
},
async insertExpansionSequenceToActivity(
- simulation_dataset_id: number,
- simulated_activity_id: number,
- seq_id: string,
+ simulationDatasetId: number,
+ simulatedActivityId: number,
+ seqId: string,
user: User | null,
): Promise<string | null> {
try {
@@ -4838,14 +4853,18 @@ const effects = {
throwPermissionError('add an expansion sequence to an activity');
}
- const input: ExpansionSequenceToActivityInsertInput = { seq_id, simulated_activity_id, simulation_dataset_id };
+ const input: ExpansionSequenceToActivityInsertInput = {
+ seq_id: seqId,
+ simulated_activity_id: simulatedActivityId,
+ simulation_dataset_id: simulationDatasetId,
+ };
const data = await reqHasura<{ seq_id: string }>(gql.INSERT_EXPANSION_SEQUENCE_TO_ACTIVITY, { input }, user);
const { sequence } = data;
if (sequence != null) {
showSuccessToast('Expansion Sequence Added To Activity Successfully');
- const { seq_id } = sequence;
- return seq_id;
+ const { seq_id: newSeqId } = sequence;
+ return newSeqId;
} else {
return null;
}
@@ -4976,7 +4995,7 @@ const effects = {
},
async planMergeBegin(
- merge_request_id: number,
+ mergeRequestId: number,
sourcePlan: PlanForMerging | undefined,
targetPlan: PlanForMerging,
user: User | null,
@@ -4986,7 +5005,11 @@ const effects = {
throwPermissionError('begin a merge');
}
- const data = await reqHasura<{ merge_request_id: number }>(gql.PLAN_MERGE_BEGIN, { merge_request_id }, user);
+ const data = await reqHasura<{ merge_request_id: number }>(
+ gql.PLAN_MERGE_BEGIN,
+ { merge_request_id: mergeRequestId },
+ user,
+ );
if (data.begin_merge != null) {
return true;
} else {
@@ -5000,7 +5023,7 @@ const effects = {
},
async planMergeCancel(
- merge_request_id: number,
+ mergeRequestId: number,
sourcePlan: PlanForMerging | undefined,
targetPlan: PlanForMerging,
user: User | null,
@@ -5010,7 +5033,11 @@ const effects = {
throwPermissionError('cancel this merge request');
}
- const data = await reqHasura<{ merge_request_id: number }>(gql.PLAN_MERGE_CANCEL, { merge_request_id }, user);
+ const data = await reqHasura<{ merge_request_id: number }>(
+ gql.PLAN_MERGE_CANCEL,
+ { merge_request_id: mergeRequestId },
+ user,
+ );
if (data.cancel_merge != null) {
showSuccessToast('Canceled Merge Request');
return true;
@@ -5025,7 +5052,7 @@ const effects = {
},
async planMergeCommit(
- merge_request_id: number,
+ mergeRequestId: number,
sourcePlan: PlanForMerging | undefined,
targetPlan: PlanForMerging,
user: User | null,
@@ -5035,7 +5062,11 @@ const effects = {
throwPermissionError('approve this merge request');
}
- const data = await reqHasura<{ merge_request_id: number }>(gql.PLAN_MERGE_COMMIT, { merge_request_id }, user);
+ const data = await reqHasura<{ merge_request_id: number }>(
+ gql.PLAN_MERGE_COMMIT,
+ { merge_request_id: mergeRequestId },
+ user,
+ );
if (data.commit_merge != null) {
showSuccessToast('Approved Merge Request Changes');
return true;
@@ -5050,7 +5081,7 @@ const effects = {
},
async planMergeDeny(
- merge_request_id: number,
+ mergeRequestId: number,
sourcePlan: PlanForMerging | undefined,
targetPlan: PlanForMerging,
user: User | null,
@@ -5060,7 +5091,11 @@ const effects = {
throwPermissionError('deny this merge request');
}
- const data = await reqHasura<{ merge_request_id: number }>(gql.PLAN_MERGE_DENY, { merge_request_id }, user);
+ const data = await reqHasura<{ merge_request_id: number }>(
+ gql.PLAN_MERGE_DENY,
+ { merge_request_id: mergeRequestId },
+ user,
+ );
if (data.deny_merge != null) {
showSuccessToast('Denied Merge Request Changes');
return true;
@@ -5075,7 +5110,7 @@ const effects = {
},
async planMergeRequestWithdraw(
- merge_request_id: number,
+ mergeRequestId: number,
sourcePlan: PlanForMerging,
targetPlan: PlanForMerging | undefined,
user: User | null,
@@ -5087,7 +5122,7 @@ const effects = {
const data = await reqHasura<{ merge_request_id: number }>(
gql.PLAN_MERGE_REQUEST_WITHDRAW,
- { merge_request_id },
+ { merge_request_id: mergeRequestId },
user,
);
if (data.withdraw_merge_request != null) {
@@ -5104,7 +5139,7 @@ const effects = {
},
async planMergeResolveAllConflicts(
- merge_request_id: number,
+ mergeRequestId: number,
resolution: PlanMergeResolution,
sourcePlan: PlanForMerging | undefined,
targetPlan: PlanForMerging,
@@ -5115,7 +5150,11 @@ const effects = {
throwPermissionError('resolve merge request conflicts');
}
- const data = await reqHasura(gql.PLAN_MERGE_RESOLVE_ALL_CONFLICTS, { merge_request_id, resolution }, user);
+ const data = await reqHasura(
+ gql.PLAN_MERGE_RESOLVE_ALL_CONFLICTS,
+ { merge_request_id: mergeRequestId, resolution },
+ user,
+ );
if (data.set_resolution_bulk == null) {
throw Error('Unable to resolve all merge request conflicts');
}
@@ -5126,8 +5165,8 @@ const effects = {
},
async planMergeResolveConflict(
- merge_request_id: number,
- activity_id: ActivityDirectiveId,
+ mergeRequestId: number,
+ activityId: ActivityDirectiveId,
resolution: PlanMergeResolution,
sourcePlan: PlanForMerging | undefined,
targetPlan: PlanForMerging,
@@ -5140,7 +5179,7 @@ const effects = {
const data = await reqHasura(
gql.PLAN_MERGE_RESOLVE_CONFLICT,
- { activity_id, merge_request_id, resolution },
+ { activity_id: activityId, merge_request_id: mergeRequestId, resolution },
user,
);
if (data.set_resolution == null) {
@@ -5154,8 +5193,8 @@ const effects = {
async removePresetFromActivityDirective(
plan: Plan,
- activity_directive_id: ActivityDirectiveId,
- preset_id: ActivityPresetId,
+ activityDirectiveId: ActivityDirectiveId,
+ presetId: ActivityPresetId,
user: User | null,
): Promise<boolean> {
try {
@@ -5165,7 +5204,7 @@ const effects = {
const data = await reqHasura<{ preset_id: number }>(
gql.DELETE_PRESET_TO_DIRECTIVE,
- { activity_directive_id, plan_id: plan.id, preset_id },
+ { activity_directive_id: activityDirectiveId, plan_id: plan.id, preset_id: presetId },
user,
);
if (data.delete_preset_to_directive_by_pk != null) {
@@ -5173,7 +5212,7 @@ const effects = {
return true;
} else {
throw Error(
- `Unable to remove activity preset with ID: "${preset_id}" from directive with ID: "${activity_directive_id}"`,
+ `Unable to remove activity preset with ID: "${presetId}" from directive with ID: "${activityDirectiveId}"`,
);
}
} catch (e) {
@@ -5221,15 +5260,15 @@ const effects = {
const { confirm, value } = await showRestorePlanSnapshotModal(
snapshot,
- (get(activityDirectivesDB) || []).length,
+ (get(activityDirectivesDBStore) || []).length,
user,
);
if (confirm) {
if (value && value.shouldCreateSnapshot) {
- const { description, name, snapshot, tags } = value;
+ const { description, name, snapshot: restoredSnapshot, tags } = value;
- await effects.createPlanSnapshotHelper(snapshot.plan_id, name, description, tags, user);
+ await effects.createPlanSnapshotHelper(restoredSnapshot.plan_id, name, description, tags, user);
}
const data = await reqHasura(
@@ -5272,17 +5311,17 @@ const effects = {
const data = await reqGateway('/modelExtraction', 'POST', JSON.stringify({ missionModelId: id }), user, false);
if (data != null) {
const {
- response: { activity_types, model_parameters, resource_types },
+ response: { activity_types: activityTypes, model_parameters: modelParameters, resource_types: resourceTypes },
} = data;
- if (activity_types.error) {
- throw Error(activity_types.error);
+ if (activityTypes.error) {
+ throw Error(activityTypes.error);
}
- if (model_parameters.error) {
- throw Error(model_parameters.error);
+ if (modelParameters.error) {
+ throw Error(modelParameters.error);
}
- if (resource_types.error) {
- throw Error(resource_types.error);
+ if (resourceTypes.error) {
+ throw Error(resourceTypes.error);
}
showSuccessToast('Model Extraction Retriggered Successfully');
@@ -5297,21 +5336,26 @@ const effects = {
return null;
},
- async schedule(analysis_only: boolean = false, plan: Plan | null, user: User | null): Promise<void> {
+ async schedule(analysisOnly: boolean = false, plan: Plan | null, user: User | null): Promise<void> {
try {
if (plan) {
if (
!queryPermissions.UPDATE_SCHEDULING_SPECIFICATION(user, plan) ||
!queryPermissions.SCHEDULE(user, plan, plan.model)
) {
- throwPermissionError(`run ${analysis_only ? 'scheduling analysis' : 'scheduling'}`);
+ throwPermissionError(`run ${analysisOnly ? 'scheduling analysis' : 'scheduling'}`);
}
- const specificationId = get(selectedSpecId);
+ const specificationId = get(selectedSpecIdStore);
if (plan !== null && specificationId !== null) {
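+ // Fetch the latest plan revision and stamp it (plus the analysis-only flag) onto the scheduling spec before requesting a run.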
- const plan_revision = await effects.getPlanRevision(plan.id, user);
- if (plan_revision !== null) {
- await effects.updateSchedulingSpec(specificationId, { analysis_only, plan_revision }, plan, user);
+ const planRevision = await effects.getPlanRevision(plan.id, user);
+ if (planRevision !== null) {
+ await effects.updateSchedulingSpec(
+ specificationId,
+ { analysis_only: analysisOnly, plan_revision: planRevision },
+ plan,
+ user,
+ );
} else {
throw Error(`Plan revision for plan ${plan.id} was not found.`);
}
@@ -5322,11 +5366,11 @@ const effects = {
const { reason, analysisId } = schedule;
if (reason) {
catchSchedulingError(reason);
- showFailureToast(`Scheduling ${analysis_only ? 'Analysis ' : ''}Failed`);
+ showFailureToast(`Scheduling ${analysisOnly ? 'Analysis ' : ''}Failed`);
return;
}
- const unsubscribe = schedulingRequests.subscribe(async (requests: SchedulingRequest[]) => {
+ const unsubscribe = schedulingRequestsStore.subscribe(async (requests: SchedulingRequest[]) => {
const matchingRequest = requests.find(request => request.analysis_id === analysisId);
if (matchingRequest) {
if (matchingRequest.canceled) {
@@ -5335,7 +5379,7 @@ const effects = {
// If a new simulation was run during scheduling, the response will include a datasetId
// which will need to be cross referenced with a simulation_dataset.id so we
// can load that new simulation. Load the associated sim dataset if it is not already loaded.
- const currentSimulationDataset = get(simulationDataset);
+ const currentSimulationDataset = get(simulationDatasetStore);
if (
typeof matchingRequest.dataset_id === 'number' &&
(!currentSimulationDataset || matchingRequest.dataset_id !== currentSimulationDataset.dataset_id)
@@ -5345,24 +5389,24 @@ const effects = {
{ datasetId: matchingRequest.dataset_id },
user,
);
- const { simulation_dataset } = simDatasetIdData;
+ const { simulation_dataset: simulationDataset } = simDatasetIdData;
// the request above will return either 0 or 1 element
- if (Array.isArray(simulation_dataset) && simulation_dataset.length > 0) {
- simulationDatasetId.set(simulation_dataset[0].id);
+ if (Array.isArray(simulationDataset) && simulationDataset.length > 0) {
+ simulationDatasetIdStore.set(simulationDataset[0].id);
}
}
- showSuccessToast(`Scheduling ${analysis_only ? 'Analysis ' : ''}Complete`);
+ showSuccessToast(`Scheduling ${analysisOnly ? 'Analysis ' : ''}Complete`);
unsubscribe();
} else if (matchingRequest.status === 'failed') {
if (matchingRequest.reason) {
catchSchedulingError(matchingRequest.reason);
}
- showFailureToast(`Scheduling ${analysis_only ? 'Analysis ' : ''}Failed`);
+ showFailureToast(`Scheduling ${analysisOnly ? 'Analysis ' : ''}Failed`);
unsubscribe();
}
}
});
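+ // Tear down both subscriptions if the user navigates to a different plan mid-run.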
- const planIdUnsubscribe = planId.subscribe(currentPlanId => {
+ const planIdUnsubscribe = planIdStore.subscribe(currentPlanId => {
if (currentPlanId < 0 || currentPlanId !== plan.id) {
unsubscribe();
planIdUnsubscribe();
@@ -5404,7 +5448,7 @@ const effects = {
const { simulate } = data;
if (simulate != null) {
const { simulationDatasetId: newSimulationDatasetId } = simulate;
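+ // Point the UI at the dataset produced by the newly started simulation.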
- simulationDatasetId.set(newSimulationDatasetId);
+ simulationDatasetIdStore.set(newSimulationDatasetId);
} else {
throw Error('Unable to simulate this plan');
}
@@ -5475,7 +5519,7 @@ const effects = {
if (data.update_activity_directive_by_pk) {
const { update_activity_directive_by_pk: updatedDirective } = data;
- activityDirectivesDB.updateValue(directives => {
+ activityDirectivesDBStore.updateValue(directives => {
return (directives || []).map(directive => {
if (directive.id === id) {
return updatedDirective;
@@ -5500,7 +5544,7 @@ const effects = {
}
const { id, ...restOfPresetPayload } = updatedActivityPreset;
- const { update_activity_presets_by_pk } = await reqHasura(
+ const { update_activity_presets_by_pk: updateActivityPresetsByPk } = await reqHasura(
gql.UPDATE_ACTIVITY_PRESET,
{
activityPresetSetInput: restOfPresetPayload,
@@ -5509,8 +5553,8 @@ const effects = {
user,
);
- if (update_activity_presets_by_pk != null) {
- const { name: presetName } = update_activity_presets_by_pk;
+ if (updateActivityPresetsByPk != null) {
+ const { name: presetName } = updateActivityPresetsByPk;
showSuccessToast(`Activity Preset ${presetName} Updated Successfully`);
} else {
throw Error(`Unable to update activity preset with ID: "${id}"`);
@@ -5541,11 +5585,11 @@ const effects = {
);
const { deleteConstraintDefinitionTags, insertConstraintDefinitionTags } = data;
if (insertConstraintDefinitionTags != null && deleteConstraintDefinitionTags != null) {
- const { affected_rows } = insertConstraintDefinitionTags;
+ const { affected_rows: affectedRows } = insertConstraintDefinitionTags;
showSuccessToast('Constraint Updated Successfully');
- return affected_rows;
+ return affectedRows;
} else {
throw Error('Unable to create constraint definition tags');
}
@@ -5675,7 +5719,7 @@ const effects = {
}
},
- async updateDerivationGroupAcknowledged(plan: Plan | undefined, derivation_group_name: string, user: User | null) {
+ async updateDerivationGroupAcknowledged(plan: Plan | undefined, derivationGroupName: string, user: User | null) {
if (plan === undefined) {
return;
}
@@ -5685,7 +5729,7 @@ const effects = {
}
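+ // Record that the user has acknowledged the latest data in this derivation group for the plan.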
const { updatePlanDerivationGroup: update } = await reqHasura(
gql.UPDATE_DERIVATION_GROUP_ACKNOWLEDGED,
- { acknowledged: true, derivation_group_name, plan_id: plan.id },
+ { acknowledged: true, derivation_group_name: derivationGroupName, plan_id: plan.id },
user,
);
if (update) {
@@ -5700,8 +5744,8 @@ const effects = {
async updateExpansionRule(id: number, rule: ExpansionRuleSetInput, user: User | null): Promise<string | null> {
try {
- savingExpansionRule.set(true);
- createExpansionRuleError.set(null);
+ savingExpansionRuleStore.set(true);
+ createExpansionRuleErrorStore.set(null);
if (!queryPermissions.UPDATE_EXPANSION_RULE(user, rule)) {
throwPermissionError('update this expansion rule');
@@ -5710,18 +5754,18 @@ const effects = {
const data = await reqHasura(gql.UPDATE_EXPANSION_RULE, { id, rule }, user);
const { updateExpansionRule } = data;
if (updateExpansionRule != null) {
- const { updated_at } = updateExpansionRule;
+ const { updated_at: updatedAt } = updateExpansionRule;
showSuccessToast('Expansion Rule Updated Successfully');
- savingExpansionRule.set(false);
- return updated_at;
+ savingExpansionRuleStore.set(false);
+ return updatedAt;
} else {
throw Error(`Unable to update expansion rule with ID: "${id}"`);
}
} catch (e) {
catchError('Expansion Rule Update Failed', e as Error);
showFailureToast('Expansion Rule Update Failed');
- savingExpansionRule.set(false);
- createExpansionRuleError.set((e as Error).message);
+ savingExpansionRuleStore.set(false);
+ createExpansionRuleErrorStore.set((e as Error).message);
return null;
}
},
@@ -5846,11 +5890,11 @@ const effects = {
);
const { deleteSchedulingConditionDefinitionTags, insertSchedulingConditionDefinitionTags } = data;
if (insertSchedulingConditionDefinitionTags != null && deleteSchedulingConditionDefinitionTags != null) {
- const { affected_rows } = insertSchedulingConditionDefinitionTags;
+ const { affected_rows: affectedRows } = insertSchedulingConditionDefinitionTags;
showSuccessToast('Scheduling Condition Updated Successfully');
- return affected_rows;
+ return affectedRows;
} else {
throw Error('Unable to create scheduling condition definition tags');
}
@@ -6008,11 +6052,11 @@ const effects = {
);
const { deleteSchedulingGoalDefinitionTags, insertSchedulingGoalDefinitionTags } = data;
if (insertSchedulingGoalDefinitionTags != null && deleteSchedulingGoalDefinitionTags != null) {
- const { affected_rows } = insertSchedulingGoalDefinitionTags;
+ const { affected_rows: affectedRows } = insertSchedulingGoalDefinitionTags;
showSuccessToast('Scheduling Goal Updated Successfully');
- return affected_rows;
+ return affectedRows;
} else {
- throw Error('Unable to create scheduling condition definition tags');
+ throw Error('Unable to create scheduling goal definition tags');
}
@@ -6261,7 +6305,7 @@ const effects = {
...(partialSimulationTemplate.model_id && { model_id: partialSimulationTemplate.model_id }),
};
- const { update_simulation_template_by_pk } = await reqHasura(
+ const { update_simulation_template_by_pk: updateSimulationTemplateByPk } = await reqHasura(
gql.UPDATE_SIMULATION_TEMPLATE,
{
id,
@@ -6270,8 +6314,8 @@ const effects = {
user,
);
- if (update_simulation_template_by_pk != null) {
- const { description: templateDescription } = update_simulation_template_by_pk;
+ if (updateSimulationTemplateByPk != null) {
+ const { description: templateDescription } = updateSimulationTemplateByPk;
showSuccessToast(`Simulation Template ${templateDescription} Updated Successfully`);
} else {
throw Error(`Unable to update simulation template with ID: "${id}"`);
@@ -6289,7 +6333,7 @@ const effects = {
notify: boolean = true,
): Promise<Tag | null> {
try {
- createTagError.set(null);
+ createTagErrorStore.set(null);
if (!queryPermissions.UPDATE_TAG(user, tagSetInput)) {
throwPermissionError('update tag');
}
@@ -6298,10 +6342,10 @@ const effects = {
if (notify) {
showSuccessToast('Tag Updated Successfully');
}
- createTagError.set(null);
+ createTagErrorStore.set(null);
return updatedTag;
} catch (e) {
- createTagError.set((e as Error).message);
+ createTagErrorStore.set((e as Error).message);
catchError('Update Tags Failed', e as Error);
showFailureToast('Update Tags Failed');
return null;
@@ -6326,9 +6370,9 @@ const effects = {
);
const { updateUserSequence } = data;
if (updateUserSequence != null) {
- const { updated_at } = updateUserSequence;
+ const { updated_at: updatedAt } = updateUserSequence;
showSuccessToast('User Sequence Updated Successfully');
- return updated_at;
+ return updatedAt;
} else {
throw Error(`Unable to update user sequence with ID: "${id}"`);
}
@@ -6492,11 +6536,9 @@ const effects = {
const generatedFilenames: Record<string, string> = {};
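+ // Map each uploaded file's original name to its generated server-side location.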
for (const newFile of files) {
const id = originalFilenameToId[newFile.name];
- const response = (await reqHasura<[{ name: string }]>(gql.GET_UPLOADED_FILENAME, { id }, user))[
- 'uploaded_file'
- ];
+ const response = (await reqHasura<[{ name: string }]>(gql.GET_UPLOADED_FILENAME, { id }, user)).uploaded_file;
if (response !== null) {
- generatedFilenames[newFile.name] = `${env.PUBLIC_AERIE_FILE_STORE_PREFIX}${response[0]['name']}`;
+ generatedFilenames[newFile.name] = `${env.PUBLIC_AERIE_FILE_STORE_PREFIX}${response[0].name}`;
}
}
@@ -6522,7 +6564,7 @@ const effects = {
const { newView } = data;
if (newView != null) {
- view.update(() => newView);
+ viewStore.update(() => newView);
setQueryParam(SearchParameters.VIEW_ID, `${newView.id}`);
return true;
} else {
diff --git a/src/utilities/gql.ts b/src/utilities/gql.ts
index 48106e3f29..3031cbce73 100644
--- a/src/utilities/gql.ts
+++ b/src/utilities/gql.ts
@@ -1,244 +1,4 @@
-export enum Queries {
- ACTIVITY_DIRECTIVES = 'activity_directive',
- ACTIVITY_DIRECTIVE_CHANGELOG = 'activity_directive_changelog',
- ACTIVITY_DIRECTIVE_METADATA_SCHEMAS = 'activity_directive_metadata_schema',
- ACTIVITY_DIRECTIVE_VALIDATIONS = 'activity_directive_validations',
- ACTIVITY_PRESETS = 'activity_presets',
- ACTIVITY_TYPES = 'activity_type',
- ADD_EXTERNAL_DATASET = 'addExternalDataset',
- ANCHOR_VALIDATION_STATUS = 'anchor_validation_status',
- APPLY_PRESET_TO_ACTIVITY = 'apply_preset_to_activity',
- BEGIN_MERGE = 'begin_merge',
- CANCEL_MERGE = 'cancel_merge',
- CHANNEL_DICTIONARIES = 'channel_dictionary',
- COMMAND_DICTIONARIES = 'command_dictionary',
- COMMIT_MERGE = 'commit_merge',
- CONSTRAINTS_DSL_TYPESCRIPT = 'constraintsDslTypescript',
- CONSTRAINT_DEFINITION = 'constraint_definition_by_pk',
- CONSTRAINT_METADATA = 'constraint_metadata_by_pk',
- CONSTRAINT_METADATAS = 'constraint_metadata',
- CONSTRAINT_RUN = 'constraint_run',
- CONSTRAINT_SPECIFICATIONS = 'constraint_specification',
- CONSTRAINT_VIOLATIONS = 'constraintViolations',
- CREATE_EXPANSION_SET = 'createExpansionSet',
- CREATE_MERGE_REQUEST = 'create_merge_request',
- CREATE_SNAPSHOT = 'create_snapshot',
- DELETE_ACTIVITY_DELETE_SUBTREE_BULK = 'delete_activity_by_pk_delete_subtree_bulk',
- DELETE_ACTIVITY_DIRECTIVES = 'delete_activity_directive',
- DELETE_ACTIVITY_DIRECTIVE_TAG = 'delete_activity_directive_tags_by_pk', // pluralization is a typo in the db
- DELETE_ACTIVITY_PRESET = 'delete_activity_presets_by_pk',
- DELETE_ACTIVITY_REANCHOR_PLAN_START_BULK = 'delete_activity_by_pk_reanchor_plan_start_bulk',
- DELETE_ACTIVITY_REANCHOR_TO_ANCHOR_BULK = 'delete_activity_by_pk_reanchor_to_anchor_bulk',
- DELETE_CHANNEL_DICTIONARY = 'delete_channel_dictionary_by_pk',
- DELETE_COMMAND_DICTIONARY = 'delete_command_dictionary_by_pk',
- DELETE_CONSTRAINT_DEFINITION_TAGS = 'delete_constraint_definition_tags',
- DELETE_CONSTRAINT_METADATA = 'delete_constraint_metadata_by_pk',
- DELETE_CONSTRAINT_MODEL_SPECIFICATIONS = 'delete_constraint_model_specification',
- DELETE_CONSTRAINT_SPECIFICATIONS = 'delete_constraint_specification',
- DELETE_CONSTRAINT_TAGS = 'delete_constraint_tags',
- DELETE_DERIVATION_GROUP = 'delete_derivation_group',
- DELETE_EXPANSION_RULE = 'delete_expansion_rule_by_pk',
- DELETE_EXPANSION_RULE_TAGS = 'delete_expansion_rule_tags',
- DELETE_EXPANSION_SET = 'delete_expansion_set_by_pk',
- DELETE_EXTERNAL_EVENT = 'delete_external_event',
- DELETE_EXTERNAL_EVENT_TYPE = 'delete_external_event_type_by_pk',
- DELETE_EXTERNAL_SOURCE = 'delete_external_source',
- DELETE_EXTERNAL_SOURCE_TYPE = 'delete_external_source_type_by_pk',
- DELETE_UPLOADED_FILE = 'delete_uploaded_file_by_pk',
- DELETE_MISSION_MODEL = 'delete_mission_model_by_pk',
- DELETE_PARAMETER_DICTIONARY = 'delete_parameter_dictionary_by_pk',
- DELETE_PARCEL = 'delete_parcel_by_pk',
- DELETE_PARCEL_TO_DICTIONARY_ASSOCIATION = 'delete_parcel_to_parameter_dictionary',
- DELETE_PLAN = 'delete_plan_by_pk',
- DELETE_PLAN_COLLABORATOR = 'delete_plan_collaborators_by_pk',
- DELETE_PLAN_DERIVATION_GROUP = 'delete_plan_derivation_group',
- DELETE_PLAN_SNAPSHOT = 'delete_plan_snapshot_by_pk',
- DELETE_PLAN_TAG = 'delete_plan_tags_by_pk', // pluralization is a typo in the db
- DELETE_PRESET_TO_DIRECTIVE = 'delete_preset_to_directive_by_pk',
- DELETE_SCHEDULING_CONDITION_DEFINITION_TAGS = 'delete_scheduling_condition_definition_tags',
- DELETE_SCHEDULING_CONDITION_METADATA = 'delete_scheduling_condition_metadata_by_pk',
- DELETE_SCHEDULING_CONDITION_METADATA_TAGS = 'delete_scheduling_condition_tags',
- DELETE_SCHEDULING_CONDITION_MODEL_SPECIFICATIONS = 'delete_scheduling_model_specification_conditions',
- DELETE_SCHEDULING_GOAL_DEFINITION_TAGS = 'delete_scheduling_goal_definition_tags',
- DELETE_SCHEDULING_GOAL_METADATA = 'delete_scheduling_goal_metadata_by_pk',
- DELETE_SCHEDULING_GOAL_METADATA_TAGS = 'delete_scheduling_goal_tags',
- DELETE_SCHEDULING_GOAL_MODEL_SPECIFICATIONS = 'delete_scheduling_model_specification_goals',
- DELETE_SCHEDULING_SPECIFICATION = 'delete_scheduling_specification',
- DELETE_SCHEDULING_SPECIFICATION_CONDITIONS = 'delete_scheduling_specification_conditions',
- DELETE_SCHEDULING_SPECIFICATION_GOALS = 'delete_scheduling_specification_goals',
- DELETE_SEQUENCE = 'delete_sequence_by_pk',
- DELETE_SEQUENCE_ADAPTATION = 'delete_sequence_adaptation_by_pk',
- DELETE_SEQUENCE_TO_SIMULATED_ACTIVITY = 'delete_sequence_to_simulated_activity_by_pk',
- DELETE_SIMULATION_TEMPLATE = 'delete_simulation_template_by_pk',
- DELETE_TAG = 'delete_tags_by_pk',
- DELETE_USER_SEQUENCE = 'delete_user_sequence_by_pk',
- DELETE_VIEW = 'delete_view_by_pk',
- DELETE_VIEWS = 'delete_view',
- DENY_MERGE = 'deny_merge',
- DUPLICATE_PLAN = 'duplicate_plan',
- EVENT = 'event',
- EXPAND_ALL_ACTIVITIES = 'expandAllActivities',
- EXPANSION_RULE = 'expansion_rule_by_pk',
- EXPANSION_RULES = 'expansion_rule',
- EXPANSION_RULE_TAGS = 'expansion_rule_tags',
- EXPANSION_RUNS = 'expansion_run',
- EXPANSION_SETS = 'expansion_set',
- EXTENSIONS = 'extensions',
- DERIVED_EVENTS = 'derived_events',
- EXTERNAL_EVENT = 'external_event',
- EXTERNAL_EVENT_TYPES = 'external_event_type',
- EXTERNAL_SOURCE_EVENT_TYPES = 'external_source_event_type',
- EXTERNAL_SOURCE = 'external_source_by_pk',
- EXTERNAL_SOURCES = 'external_source',
- EXTERNAL_SOURCE_TYPES = 'external_source_type',
- DERIVATION_GROUP = 'derivation_group',
- DERIVATION_GROUP_COMP = 'derivation_group_comp',
- PLAN_DERIVATION_GROUP = 'plan_derivation_group',
- GET_ACTIVITY_EFFECTIVE_ARGUMENTS_BULK = 'getActivityEffectiveArgumentsBulk',
- GET_ACTIVITY_TYPE_SCRIPT = 'getActivityTypeScript',
- GET_COMMAND_TYPE_SCRIPT = 'getCommandTypeScript',
- GET_CONFLICTING_ACTIVITIES = 'get_conflicting_activities',
- GET_EDSL_FOR_SEQ_JSON = 'getEdslForSeqJson',
- GET_MODEL_EFFECTIVE_ARGUMENTS = 'getModelEffectiveArguments',
- GET_NON_CONFLICTING_ACTIVITIES = 'get_non_conflicting_activities',
- GET_SEQUENCE_SEQ_JSON = 'getSequenceSeqJson',
- GET_USER_SEQUENCE_SEQ_JSON = 'getUserSequenceSeqJson',
- INSERT_ACTIVITY_DIRECTIVE = 'insert_activity_directive_one',
- INSERT_ACTIVITY_DIRECTIVES = 'insert_activity_directive',
- INSERT_ACTIVITY_DIRECTIVE_TAGS = 'insert_activity_directive_tags',
- INSERT_ACTIVITY_PRESET = 'insert_activity_presets_one',
- INSERT_CHANNEL_DICTIONARY = 'insert_channel_dictionary_one',
- INSERT_DERIVATION_GROUP = 'insert_derivation_group_one',
- INSERT_DICTIONARY = 'insert_dictionary_one',
- INSERT_CONSTRAINT_DEFINITION = 'insert_constraint_definition_one',
- INSERT_CONSTRAINT_DEFINITION_TAGS = 'insert_constraint_definition_tags',
- INSERT_CONSTRAINT_METADATA = 'insert_constraint_metadata_one',
- INSERT_CONSTRAINT_MODEL_SPECIFICATION = 'insert_constraint_model_specification_one',
- INSERT_CONSTRAINT_MODEL_SPECIFICATIONS = 'insert_constraint_model_specification',
- INSERT_CONSTRAINT_SPECIFICATIONS = 'insert_constraint_specification',
- INSERT_CONSTRAINT_TAGS = 'insert_constraint_tags',
- INSERT_EXPANSION_RULE = 'insert_expansion_rule_one',
- INSERT_EXPANSION_RULE_TAGS = 'insert_expansion_rule_tags',
- INSERT_EXTERNAL_EVENT_TYPE = 'insert_external_event_type',
- INSERT_EXTERNAL_EVENT_TYPE_ONE = 'insert_external_event_type_one',
- INSERT_EXTERNAL_SOURCE = 'insert_external_source_one',
- INSERT_EXTERNAL_SOURCE_TYPE = 'insert_external_source_type_one',
- INSERT_MISSION_MODEL = 'insert_mission_model_one',
- INSERT_PARAMETER_DICTIONARY = 'insert_parameter_dictionary_one',
- INSERT_PARCEL = 'insert_parcel_one',
- INSERT_PARCEL_TO_PARAMETER_DICTIONARY = 'insert_parcel_to_parameter_dictionary',
- INSERT_PLAN = 'insert_plan_one',
- INSERT_PLAN_DERIVATION_GROUP = 'insert_plan_derivation_group_one',
- INSERT_PLAN_SNAPSHOT_TAGS = 'insert_plan_snapshot_tags',
- INSERT_PLAN_COLLABORATORS = 'insert_plan_collaborators',
- INSERT_PLAN_TAGS = 'insert_plan_tags',
- INSERT_SCHEDULING_CONDITION_DEFINITION = 'insert_scheduling_condition_definition_one',
- INSERT_SCHEDULING_CONDITION_DEFINITION_TAGS = 'insert_scheduling_condition_definition_tags',
- INSERT_SCHEDULING_CONDITION_METADATA = 'insert_scheduling_condition_metadata_one',
- INSERT_SCHEDULING_CONDITION_TAGS = 'insert_scheduling_condition_tags',
- INSERT_SCHEDULING_GOAL_DEFINITION = 'insert_scheduling_goal_definition_one',
- INSERT_SCHEDULING_GOAL_DEFINITION_TAGS = 'insert_scheduling_goal_definition_tags',
- INSERT_SCHEDULING_GOAL_METADATA = 'insert_scheduling_goal_metadata_one',
- INSERT_SCHEDULING_GOAL_TAGS = 'insert_scheduling_goal_tags',
- INSERT_SCHEDULING_MODEL_SPECIFICATION_CONDITIONS = 'insert_scheduling_model_specification_conditions',
- INSERT_SCHEDULING_MODEL_SPECIFICATION_GOALS = 'insert_scheduling_model_specification_goals',
- INSERT_SCHEDULING_SPECIFICATION = 'insert_scheduling_specification_one',
- INSERT_SCHEDULING_SPECIFICATION_CONDITION = 'insert_scheduling_specification_conditions_one',
- INSERT_SCHEDULING_SPECIFICATION_CONDITIONS = 'insert_scheduling_specification_conditions',
- INSERT_SCHEDULING_SPECIFICATION_GOAL = 'insert_scheduling_specification_goals_one',
- INSERT_SCHEDULING_SPECIFICATION_GOALS = 'insert_scheduling_specification_goals',
- INSERT_SEQUENCE = 'insert_sequence_one',
- INSERT_SEQUENCE_ADAPTATION = 'insert_sequence_adaptation_one',
- INSERT_SEQUENCE_TO_SIMULATED_ACTIVITY = 'insert_sequence_to_simulated_activity_one',
- INSERT_SIMULATION_TEMPLATE = 'insert_simulation_template_one',
- INSERT_TAG = 'insert_tags_one',
- INSERT_TAGS = 'insert_tags',
- INSERT_USER_SEQUENCE = 'insert_user_sequence_one',
- INSERT_VIEW = 'insert_view_one',
- INSERT_WORKSPACE = 'insert_workspace_one',
- MERGE_REQUEST = 'merge_request_by_pk',
- MERGE_REQUESTS = 'merge_request',
- MISSION_MODEL = 'mission_model_by_pk',
- MISSION_MODELS = 'mission_model',
- PARAMETER_DICTIONARIES = 'parameter_dictionary',
- PARCEL = 'parcel_by_pk',
- PARCELS = 'parcel',
- PARCEL_TO_PARAMETER_DICTIONARY = 'parcel_to_parameter_dictionary',
- PLAN = 'plan_by_pk',
- PLANS = 'plan',
- PLAN_DATASETS = 'plan_dataset',
- PLAN_SNAPSHOTS = 'plan_snapshot',
- PLAN_SNAPSHOT_ACTIVITIES = 'plan_snapshot_activities',
- PROFILES = 'profile',
- RESOURCE_TYPES = 'resource_type',
- RESTORE_ACTIVITY_FROM_CHANGELOG = 'restoreActivityFromChangelog',
- RESTORE_FROM_SNAPSHOT = 'restore_from_snapshot',
- SCHEDULE = 'schedule',
- SCHEDULING_CONDITION_METADATA = 'scheduling_condition_metadata_by_pk',
- SCHEDULING_CONDITION_METADATAS = 'scheduling_condition_metadata',
- SCHEDULING_DSL_TYPESCRIPT = 'schedulingDslTypescript',
- SCHEDULING_GOAL_METADATA = 'scheduling_goal_metadata_by_pk',
- SCHEDULING_GOAL_METADATAS = 'scheduling_goal_metadata',
- SCHEDULING_REQUESTS = 'scheduling_request',
- SCHEDULING_SPECIFICATION = 'scheduling_specification_by_pk',
- SCHEDULING_SPECIFICATION_CONDITIONS = 'scheduling_specification_conditions',
- SCHEDULING_SPECIFICATION_GOALS = 'scheduling_specification_goals',
- SEEN_SOURCES = 'seen_sources',
- SEQUENCE = 'sequence',
- SEQUENCE_ADAPTATION = 'sequence_adaptation',
- SEQUENCE_TO_SIMULATED_ACTIVITY = 'sequence_to_simulated_activity_by_pk',
- SET_RESOLUTION = 'set_resolution',
- SET_RESOLUTIONS = 'set_resolution_bulk',
- SIMULATE = 'simulate',
- SIMULATIONS = 'simulation',
- SIMULATION_DATASET = 'simulation_dataset_by_pk',
- SIMULATION_DATASETS = 'simulation_dataset',
- SIMULATION_TEMPLATES = 'simulation_template',
- SPANS = 'span',
- TAGS = 'tags',
- TOPIC = 'topic',
- UPDATE_ACTIVITY_DIRECTIVE = 'update_activity_directive_by_pk',
- UPDATE_ACTIVITY_DIRECTIVES = 'update_activity_directive_many',
- UPDATE_ACTIVITY_PRESET = 'update_activity_presets_by_pk',
- UPDATE_CONSTRAINT_METADATA = 'update_constraint_metadata_by_pk',
- UPDATE_CONSTRAINT_SPECIFICATION = 'update_constraint_specification_by_pk',
- UPDATE_CONSTRAINT_MODEL_SPECIFICATION = 'update_constraint_model_specification_by_pk',
- UPDATE_DERIVATION_GROUP_ACKNOWLEDGED = 'update_plan_derivation_group_by_pk',
- UPDATE_EXPANSION_RULE = 'update_expansion_rule_by_pk',
- UPDATE_MISSION_MODEL = 'update_mission_model_by_pk',
- UPDATE_PARCEL = 'update_parcel_by_pk',
- UPDATE_PLAN_SNAPSHOT = 'update_plan_snapshot_by_pk',
- UPDATE_PLAN = 'update_plan_by_pk',
- UPDATE_SCHEDULING_CONDITION_METADATA = 'update_scheduling_condition_metadata_by_pk',
- UPDATE_SCHEDULING_GOAL_METADATA = 'update_scheduling_goal_metadata_by_pk',
- UPDATE_SCHEDULING_REQUEST = 'update_scheduling_request',
- UPDATE_SCHEDULING_SPECIFICATION = 'update_scheduling_specification_by_pk',
- UPDATE_SCHEDULING_SPECIFICATION_CONDITION = 'update_scheduling_specification_conditions_by_pk',
- UPDATE_SCHEDULING_SPECIFICATION_GOAL = 'update_scheduling_specification_goals_by_pk',
- UPDATE_SCHEDULING_CONDITION_MODEL_SPECIFICATION = 'update_scheduling_model_specification_conditions_by_pk',
- UPDATE_SCHEDULING_CONDITION_MODEL_SPECIFICATIONS = 'update_scheduling_model_specification_conditions',
- UPDATE_SCHEDULING_GOAL_MODEL_SPECIFICATION = 'update_scheduling_model_specification_goals_by_pk',
- UPDATE_SCHEDULING_GOAL_MODEL_SPECIFICATIONS = 'update_scheduling_model_specification_goals',
- UPDATE_SIMULATION = 'update_simulation_by_pk',
- UPDATE_SIMULATIONS = 'update_simulation',
- UPDATE_SIMULATION_DATASET = 'update_simulation_dataset_by_pk',
- UPDATE_SIMULATION_TEMPLATE = 'update_simulation_template_by_pk',
- UPDATE_TAGS = 'update_tags_by_pk',
- UPDATE_USER_SEQUENCE = 'update_user_sequence_by_pk',
- UPDATE_VIEW = 'update_view_by_pk',
- UPDATE_WORKSPACE = 'update_workspace_by_pk',
- UPLOADED_FILES = 'uploaded_file',
- UPLOAD_DICTIONARY = 'uploadDictionary',
- USER_ROLE_PERMISSION = 'user_role_permission',
- USER_SEQUENCE = 'user_sequence_by_pk',
- USER_SEQUENCES = 'user_sequence',
- USERS = 'users',
- VALIDATE_ACTIVITY_ARGUMENTS = 'validateActivityArguments',
- VIEW = 'view_by_pk',
- VIEWS = 'view',
- WORKSPACES = 'workspace',
- WITHDRAW_MERGE_REQUEST = 'withdraw_merge_request',
-}
+import { Queries } from '../enums/gql';
/**
* GraphQL Query, Mutation, and Subscription strings.
diff --git a/src/utilities/permissions.ts b/src/utilities/permissions.ts
index c8e0505937..b0b3fd32c0 100644
--- a/src/utilities/permissions.ts
+++ b/src/utilities/permissions.ts
@@ -1,4 +1,5 @@
import { base } from '$app/paths';
+import { Queries } from '../enums/gql';
import type { ActivityDirective, ActivityPreset } from '../types/activity';
import type { User, UserRole } from '../types/app';
import type { ReqAuthResponse } from '../types/auth';
@@ -27,7 +28,7 @@ import type { Parcel, UserSequence, Workspace } from '../types/sequencing';
import type { PlanDataset, Simulation, SimulationTemplate } from '../types/simulation';
import type { Tag } from '../types/tags';
import type { View, ViewSlim } from '../types/view';
-import gql, { Queries } from './gql';
+import gql from './gql';
import { showFailureToast } from './toast';
export const ADMIN_ROLE = 'aerie_admin';
diff --git a/src/utilities/sequence-editor/command-dictionary.ts b/src/utilities/sequence-editor/command-dictionary.ts
index 96d2162acc..023856f895 100644
--- a/src/utilities/sequence-editor/command-dictionary.ts
+++ b/src/utilities/sequence-editor/command-dictionary.ts
@@ -18,15 +18,15 @@ import { logError } from './logger';
* Return a default argument for a given argument definition.
*/
export function fswCommandArgDefault(fswCommandArg: FswCommandArgument, enumMap: EnumMap): string {
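+ // Prefer the declared default_value, then the range minimum, then a sensible fallback for the type.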
- const { arg_type } = fswCommandArg;
+ const { arg_type: argType } = fswCommandArg;
- switch (arg_type) {
+ switch (argType) {
case 'boolean': {
const booleanArg = fswCommandArg as FswCommandArgumentBoolean;
- const { default_value } = booleanArg;
+ const { default_value: defaultValue } = booleanArg;
- if (default_value !== null) {
- return default_value.toLowerCase();
+ if (defaultValue !== null) {
+ return defaultValue.toLowerCase();
} else {
return 'false';
}
@@ -42,10 +42,10 @@ export function fswCommandArgDefault(fswCommandArg: FswCommandArgument, enumMap:
return '""';
case 'float': {
const floatArg = fswCommandArg as FswCommandArgumentFloat;
- const { default_value, range } = floatArg;
+ const { default_value: defaultValue, range } = floatArg;
- if (default_value !== null) {
- return `${default_value}`;
+ if (defaultValue !== null) {
+ return `${defaultValue}`;
} else if (range !== null) {
const { min } = range;
return `${min}`;
@@ -55,10 +55,10 @@ export function fswCommandArgDefault(fswCommandArg: FswCommandArgument, enumMap:
}
case 'integer': {
const intArg = fswCommandArg as FswCommandArgumentInteger;
- const { default_value, range } = intArg;
+ const { default_value: defaultValue, range } = intArg;
- if (default_value !== null) {
- return `${default_value}`;
+ if (defaultValue !== null) {
+ return `${defaultValue}`;
} else if (range !== null) {
const { min } = range;
return `${min}`;
@@ -68,10 +68,10 @@ export function fswCommandArgDefault(fswCommandArg: FswCommandArgument, enumMap:
}
case 'numeric': {
const numericArg = fswCommandArg as FswCommandArgumentNumeric;
- const { default_value, range } = numericArg;
+ const { default_value: defaultValue, range } = numericArg;
- if (default_value !== null) {
- return `${default_value}`;
+ if (defaultValue !== null) {
+ return `${defaultValue}`;
} else if (range !== null) {
const { min } = range;
return `${min}`;
@@ -120,10 +120,10 @@ export function fswCommandArgDefault(fswCommandArg: FswCommandArgument, enumMap:
return '0';
case 'unsigned': {
const numericArg = fswCommandArg as FswCommandArgumentUnsigned;
- const { default_value, range } = numericArg;
+ const { default_value: defaultValue, range } = numericArg;
- if (default_value !== null) {
- return `${default_value}`;
+ if (defaultValue !== null) {
+ return `${defaultValue}`;
} else if (range !== null) {
const { min } = range;
return `${min}`;
@@ -133,10 +133,10 @@ export function fswCommandArgDefault(fswCommandArg: FswCommandArgument, enumMap:
}
case 'var_string': {
const varStringArg = fswCommandArg as FswCommandArgumentVarString;
- const { default_value } = varStringArg;
+ const { default_value: defaultValue } = varStringArg;
- if (default_value) {
- return default_value;
+ if (defaultValue) {
+ return defaultValue;
} else {
return '""';
}
diff --git a/src/utilities/sequence-editor/extension-points.ts b/src/utilities/sequence-editor/extension-points.ts
index cb5e782ee2..f0785d9cb9 100644
--- a/src/utilities/sequence-editor/extension-points.ts
+++ b/src/utilities/sequence-editor/extension-points.ts
@@ -84,15 +84,15 @@ export function inputLinter(
librarySequences: LibrarySequence[] = [],
): Extension {
return linter(view => {
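+ // Layer the adaptation's input-format linter on top of the base sequence linter diagnostics.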
- const inputLinter = get(sequenceAdaptation).inputFormat.linter;
+ const inputFormatLinter = get(sequenceAdaptation).inputFormat.linter;
const tree = syntaxTree(view.state);
const treeNode = tree.topNode;
let diagnostics: Diagnostic[];
diagnostics = sequenceLinter(view, channelDictionary, commandDictionary, parameterDictionaries, librarySequences);
- if (inputLinter !== undefined && commandDictionary !== null) {
- diagnostics = inputLinter(diagnostics, commandDictionary, view, treeNode);
+ if (inputFormatLinter !== undefined && commandDictionary !== null) {
+ diagnostics = inputFormatLinter(diagnostics, commandDictionary, view, treeNode);
}
return diagnostics;
@@ -106,13 +106,13 @@ export function outputLinter(
return linter(view => {
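+ // Same layering for the output side: start from the seq-json linter diagnostics.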
const tree = syntaxTree(view.state);
const treeNode = tree.topNode;
- const outputLinter = outputFormat?.linter;
+ const outputFormatLinter = outputFormat?.linter;
let diagnostics: Diagnostic[];
diagnostics = seqJsonLinter(view, commandDictionary);
- if (outputLinter !== undefined && commandDictionary !== null) {
- diagnostics = outputLinter(diagnostics, commandDictionary, view, treeNode);
+ if (outputFormatLinter !== undefined && commandDictionary !== null) {
+ diagnostics = outputFormatLinter(diagnostics, commandDictionary, view, treeNode);
}
return diagnostics;
diff --git a/src/utilities/sequence-editor/from-seq-json.ts b/src/utilities/sequence-editor/from-seq-json.ts
index 94e6bc7a29..5dce337665 100644
--- a/src/utilities/sequence-editor/from-seq-json.ts
+++ b/src/utilities/sequence-editor/from-seq-json.ts
@@ -125,7 +125,7 @@ function seqJsonVariableToSequence(
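+ // Emit one line per variable: name, type, enum name, allowable ranges, allowable values.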
sequence += variables
.map(variable => {
const name = variable.name;
- const type = variable.type ? ` ${variable.type}` : '';
+ const variableType = variable.type ? ` ${variable.type}` : '';
const enumName = variable.enum_name ? ` ${variable.enum_name}` : '';
const allowableRanges = variable.allowable_ranges
? ` "${variable.allowable_ranges.map(range => `${range.min}...${range.max}`).join(',')}"`
@@ -133,7 +133,7 @@ function seqJsonVariableToSequence(
const allowableValues = variable.allowable_values
? ` ${allowableRanges.length === 0 ? '"" ' : ''}"${variable.allowable_values.map(value => `${value}`).join(',')}"`
: '';
- return `${name}${type}${enumName}${allowableRanges}${allowableValues}`;
+ return `${name}${variableType}${enumName}${allowableRanges}${allowableValues}`;
})
.join('\n');
diff --git a/src/utilities/sequence-editor/sequence-linter.ts b/src/utilities/sequence-editor/sequence-linter.ts
index f8521c89eb..e628fea27a 100644
--- a/src/utilities/sequence-editor/sequence-linter.ts
+++ b/src/utilities/sequence-editor/sequence-linter.ts
@@ -153,8 +153,8 @@ export function sequenceLinter(
),
);
diagnostics.push(
- ...validateActivateLoad(commandsNode.getChildren(TOKEN_ACTIVATE), 'Activate', docText, librarySequences),
- ...validateActivateLoad(commandsNode.getChildren(TOKEN_LOAD), 'Load', docText, librarySequences),
+ ...validateActivateLoad(commandsNode.getChildren(TOKEN_ACTIVATE), docText, librarySequences),
+ ...validateActivateLoad(commandsNode.getChildren(TOKEN_LOAD), docText, librarySequences),
);
}
@@ -183,9 +183,7 @@ export function sequenceLinter(
),
);
- diagnostics.push(
- ...conditionalAndLoopKeywordsLinter(treeNode.getChild('Commands')?.getChildren(TOKEN_COMMAND) ?? [], view.state),
- );
+ diagnostics.push(...conditionalAndLoopKeywordsLinter(view.state));
return diagnostics;
}
@@ -215,7 +213,7 @@ function validateParserErrors(tree: Tree) {
return diagnostics;
}
-function conditionalAndLoopKeywordsLinter(commandNodes: SyntaxNode[], state: EditorState): Diagnostic[] {
+function conditionalAndLoopKeywordsLinter(state: EditorState): Diagnostic[] {
const diagnostics: Diagnostic[] = [];
const blocks = computeBlocks(state);
@@ -372,10 +370,10 @@ export function validateVariables(inputParams: SyntaxNode[], text: string, type:
const rangeNode = parameter.getChild('Range');
const objectNode = parameter.getChild('Object');
- const { enumName, name, range, type } = getVariableInfo(parameter, text);
+ const { enumName, name, range, type: variableType } = getVariableInfo(parameter, text);
- if (type) {
- if (['FLOAT', 'INT', 'STRING', 'UINT', 'ENUM'].includes(type) === false) {
+ if (variableType) {
+ if (['FLOAT', 'INT', 'STRING', 'UINT', 'ENUM'].includes(variableType) === false) {
const node = typeNode ?? objectNode ?? parameter;
const { from, to } = node;
diagnostics.push({
@@ -384,7 +382,7 @@ export function validateVariables(inputParams: SyntaxNode[], text: string, type:
severity: 'error',
to,
});
- } else if (type.toLocaleLowerCase() === 'enum' && !enumName) {
+ } else if (variableType.toLocaleLowerCase() === 'enum' && !enumName) {
const node = typeNode ?? objectNode ?? parameter;
const { from, to } = node;
diagnostics.push({
@@ -393,7 +391,7 @@ export function validateVariables(inputParams: SyntaxNode[], text: string, type:
severity: 'error',
to,
});
- } else if (type.toLocaleLowerCase() !== 'enum' && enumName) {
+ } else if (variableType.toLocaleLowerCase() !== 'enum' && enumName) {
const node = enumNode ?? objectNode ?? parameter;
const { from, to } = node;
diagnostics.push({
@@ -402,7 +400,7 @@ export function validateVariables(inputParams: SyntaxNode[], text: string, type:
severity: 'error',
to,
});
- } else if (type.toLocaleLowerCase() === 'string' && range) {
+ } else if (variableType.toLocaleLowerCase() === 'string' && range) {
const node = rangeNode ?? objectNode ?? parameter;
const { from, to } = node;
diagnostics.push({
@@ -416,7 +414,7 @@ export function validateVariables(inputParams: SyntaxNode[], text: string, type:
const variable = {
name,
- type,
+ type: variableType,
} as VariableDeclaration;
variables.push(variable);
@@ -503,19 +501,14 @@ function getVariableInfo(
};
}
-function validateActivateLoad(
- node: SyntaxNode[],
- type: 'Activate' | 'Load',
- text: string,
- librarySequences: LibrarySequence[],
-): Diagnostic[] {
+function validateActivateLoad(node: SyntaxNode[], text: string, librarySequences: LibrarySequence[]): Diagnostic[] {
if (node.length === 0) {
return [];
}
const diagnostics: Diagnostic[] = [];
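+ // Validate each Activate/Load step against the library sequence it references.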
- node.forEach(activate => {
+ node.forEach((activate: SyntaxNode) => {
const sequenceName = activate.getChild(RULE_SEQUENCE_NAME);
const argNode = activate.getChild(RULE_ARGS);
@@ -523,7 +516,7 @@ function validateActivateLoad(
return;
}
const library = librarySequences.find(
- library => library.name === text.slice(sequenceName.from, sequenceName.to).replace(/^"|"$/g, ''),
+ sequence => sequence.name === text.slice(sequenceName.from, sequenceName.to).replace(/^"|"$/g, ''),
);
const argsNode = getChildrenNode(argNode);
if (!library) {
@@ -539,7 +532,7 @@ function validateActivateLoad(
});
if (structureError) {
diagnostics.push(structureError);
- return diagnostics;
+ return;
}
library?.parameters.forEach((parameter, index) => {
@@ -658,8 +651,8 @@ function validateCustomDirectives(node: SyntaxNode, text: string): Diagnostic[]
diagnostics.push({
actions: [
{
- apply(view, from, to) {
- view.dispatch({ changes: { from, insert, to } });
+ apply(view, diagnosticsFrom, diagnosticsTo) {
+ view.dispatch({ changes: { from: diagnosticsFrom, insert, to: diagnosticsTo } });
},
name: `Change to ${guess}`,
},
@@ -1271,8 +1264,8 @@ function validateCommandStructure(
return {
actions: [
{
- apply(view, from, to) {
- view.dispatch({ changes: { from, to } });
+ apply(view, argsFrom, argsTo) {
+ view.dispatch({ changes: { from: argsFrom, to: argsTo } });
},
name: `Remove ${extraArgs.length} extra ${commandArgs}`,
},
@@ -1596,8 +1589,8 @@ function validateId(commandNode: SyntaxNode, text: string): Diagnostic[] {
actions: idValNode
? [
{
- apply(view, from, to) {
- view.dispatch({ changes: { from, insert: quoteEscape(idVal), to } });
+ apply(view, diagnosticsFrom, diagnosticsTo) {
+ view.dispatch({ changes: { from: diagnosticsFrom, insert: quoteEscape(idVal), to: diagnosticsTo } });
},
name: `Quote ${idVal}`,
},
@@ -1671,10 +1664,10 @@ function validateMetadata(commandNode: SyntaxNode): Diagnostic[] {
// Get the name of the template node
const templateName = metadataTemplate[i];
// Get the metadata node of the current template node
- const metadataNode = metadataNodeChildren[i];
+ const metadataNodeChild = metadataNodeChildren[i];
// If there is no metadata node, add a diagnostic
- if (!metadataNode) {
+ if (!metadataNodeChild) {
diagnostics.push({
actions: [],
from: entry.from,
@@ -1686,9 +1679,9 @@ function validateMetadata(commandNode: SyntaxNode): Diagnostic[] {
}
// If the name of the metadata node is not the template node name
- if (metadataNode.name !== templateName) {
+ if (metadataNodeChild.name !== templateName) {
// Get the name of the deepest node of the metadata node
- const deepestNodeName = getDeepestNode(metadataNode).name;
+ const deepestNodeName = getDeepestNode(metadataNodeChild).name;
// Add a diagnostic based on the name of the deepest node
switch (deepestNodeName) {
case 'String':
@@ -1697,10 +1690,10 @@ function validateMetadata(commandNode: SyntaxNode): Diagnostic[] {
case 'Enum':
case 'Boolean':
diagnostics.push({
- from: metadataNode.from,
+ from: metadataNodeChild.from,
message: `Incorrect type - expected 'String' but got ${deepestNodeName}`,
severity: 'error',
- to: metadataNode.to,
+ to: metadataNodeChild.to,
});
break;
default:
diff --git a/src/utilities/sequence-editor/to-seq-json.ts b/src/utilities/sequence-editor/to-seq-json.ts
index 1481fde702..fdc721e0f1 100644
--- a/src/utilities/sequence-editor/to-seq-json.ts
+++ b/src/utilities/sequence-editor/to-seq-json.ts
@@ -97,11 +97,11 @@ export async function sequenceToSeqJson(
}
function parseRequest(requestNode: SyntaxNode, text: string, commandDictionary: CommandDictionary | null): Request {
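+ // A request is timed by either a ground-epoch tag or a regular time tag, never both.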
- let ground_epoch = undefined;
+ let groundEpoch = undefined;
let time = undefined;
const groundEpochNode = requestNode.getChild('TimeTag')?.getChild('TimeGroundEpoch');
if (groundEpochNode) {
- ground_epoch = parseGroundEpoch(requestNode.getChild('TimeTag'), text);
+ groundEpoch = parseGroundEpoch(requestNode.getChild('TimeTag'), text);
} else {
time = parseTime(requestNode, text);
}
@@ -136,7 +136,7 @@ function parseRequest(requestNode: SyntaxNode, text: string, commandDictionary:
// ground epoch
return {
description,
- ground_epoch,
+ ground_epoch: groundEpoch,
metadata,
name,
steps: steps as [Step, ...Step[]],
@@ -473,8 +473,8 @@ function parseTime(commandNode: SyntaxNode, text: string): Time {
}
if (timeTagAbsoluteNode) {
- const tag = text.slice(timeTagAbsoluteNode.from + 1, timeTagAbsoluteNode.to).trim();
- return { tag, type: 'ABSOLUTE' };
+ const absoluteTag = text.slice(timeTagAbsoluteNode.from + 1, timeTagAbsoluteNode.to).trim();
+ return { tag: absoluteTag, type: 'ABSOLUTE' };
} else if (timeTagEpochNode) {
const timeTagEpochText = text.slice(timeTagEpochNode.from + 1, timeTagEpochNode.to).trim();
diff --git a/vite.worker-build-plugin.js b/vite.worker-build-plugin.js
index e6f274c235..d054ae111e 100644
--- a/vite.worker-build-plugin.js
+++ b/vite.worker-build-plugin.js
@@ -12,7 +12,7 @@ import { writeFile } from 'fs/promises';
* @returns {string[]}
*/
export function normalizePaths(root, path) {
- return (Array.isArray(path) ? path : [path]).map(path => resolve(root, path)).map(normalizePath);
+ return (Array.isArray(path) ? path : [path]).map(subpath => resolve(root, subpath)).map(normalizePath);
}
/**
@@ -37,7 +37,7 @@ export const WorkerBuildPlugin = (paths, config) => ({
}
const root = process.cwd();
const { outdir = './static' } = config;
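+ // Resolve all worker entry points relative to the project root.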
- let files = normalizePaths(root, paths);
+ const files = normalizePaths(root, paths);
// When invoked via npm run build, produce an optimized build.
const ctx = await esbuild.context({