mirror of
https://github.com/lensapp/lens.git
synced 2025-05-20 05:10:56 +00:00
Enforce semicolons in eslint
Signed-off-by: Panu Horsmalahti <phorsmalahti@mirantis.com>
This commit is contained in:
parent
0f3f5611ba
commit
1477bb8274
@ -20,6 +20,7 @@ module.exports = {
|
||||
rules: {
|
||||
"indent": ["error", 2],
|
||||
"no-unused-vars": "off",
|
||||
"semi": ["error", "always"],
|
||||
}
|
||||
},
|
||||
{
|
||||
@ -47,7 +48,8 @@ module.exports = {
|
||||
"@typescript-eslint/ban-types": "off",
|
||||
"@typescript-eslint/ban-ts-comment": "off",
|
||||
"@typescript-eslint/no-empty-interface": "off",
|
||||
"indent": ["error", 2]
|
||||
"indent": ["error", 2],
|
||||
"semi": ["error", "always"],
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -75,7 +77,8 @@ module.exports = {
|
||||
"@typescript-eslint/explicit-module-boundary-types": "off",
|
||||
"@typescript-eslint/ban-types": "off",
|
||||
"@typescript-eslint/no-empty-function": "off",
|
||||
"indent": ["error", 2]
|
||||
"indent": ["error", 2],
|
||||
"semi": ["error", "always"],
|
||||
},
|
||||
}
|
||||
]
|
||||
|
||||
@ -38,6 +38,7 @@
|
||||
"download:helm": "yarn run ts-node build/download_helm.ts",
|
||||
"build:tray-icons": "yarn run ts-node build/build_tray_icon.ts",
|
||||
"lint": "yarn run eslint $@ --ext js,ts,tsx --max-warnings=0 src/",
|
||||
"lint:fix": "yarn run lint --fix",
|
||||
"mkdocs-serve-local": "docker build -t mkdocs-serve-local:latest mkdocs/ && docker run --rm -it -p 8000:8000 -v ${PWD}:/docs mkdocs-serve-local:latest",
|
||||
"typedocs-extensions-api": "yarn run typedoc --ignoreCompilerErrors --readme docs/extensions/typedoc-readme.md.tpl --name @k8slens/extensions --out docs/extensions/api --mode library --excludePrivate --hideBreadcrumbs --includes src/ src/extensions/extension-api.ts"
|
||||
},
|
||||
|
||||
@ -5,9 +5,9 @@ import { Cluster } from "../../main/cluster";
|
||||
import { ClusterStore } from "../cluster-store";
|
||||
import { workspaceStore } from "../workspace-store";
|
||||
|
||||
const testDataIcon = fs.readFileSync("test-data/cluster-store-migration-icon.png")
|
||||
const testDataIcon = fs.readFileSync("test-data/cluster-store-migration-icon.png");
|
||||
|
||||
console.log("") // fix bug
|
||||
console.log(""); // fix bug
|
||||
|
||||
let clusterStore: ClusterStore;
|
||||
|
||||
@ -18,15 +18,15 @@ describe("empty config", () => {
|
||||
'tmp': {
|
||||
'lens-cluster-store.json': JSON.stringify({})
|
||||
}
|
||||
}
|
||||
};
|
||||
mockFs(mockOpts);
|
||||
clusterStore = ClusterStore.getInstance<ClusterStore>();
|
||||
return clusterStore.load();
|
||||
})
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
mockFs.restore();
|
||||
})
|
||||
});
|
||||
|
||||
describe("with foo cluster added", () => {
|
||||
beforeEach(() => {
|
||||
@ -43,30 +43,30 @@ describe("empty config", () => {
|
||||
workspace: workspaceStore.currentWorkspaceId
|
||||
})
|
||||
);
|
||||
})
|
||||
});
|
||||
|
||||
it("adds new cluster to store", async () => {
|
||||
const storedCluster = clusterStore.getById("foo");
|
||||
expect(storedCluster.id).toBe("foo");
|
||||
expect(storedCluster.preferences.terminalCWD).toBe("/tmp");
|
||||
expect(storedCluster.preferences.icon).toBe("data:image/jpeg;base64, iVBORw0KGgoAAAANSUhEUgAAA1wAAAKoCAYAAABjkf5");
|
||||
})
|
||||
});
|
||||
|
||||
it("adds cluster to default workspace", () => {
|
||||
const storedCluster = clusterStore.getById("foo");
|
||||
expect(storedCluster.workspace).toBe("default");
|
||||
})
|
||||
});
|
||||
|
||||
it("removes cluster from store", async () => {
|
||||
await clusterStore.removeById("foo");
|
||||
expect(clusterStore.getById("foo")).toBeUndefined();
|
||||
})
|
||||
});
|
||||
|
||||
it("sets active cluster", () => {
|
||||
clusterStore.setActive("foo");
|
||||
expect(clusterStore.active.id).toBe("foo");
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
||||
|
||||
describe("with prod and dev clusters added", () => {
|
||||
beforeEach(() => {
|
||||
@ -89,8 +89,8 @@ describe("empty config", () => {
|
||||
kubeConfigPath: ClusterStore.embedCustomKubeConfig("dev", "fancy config"),
|
||||
workspace: "workstation"
|
||||
})
|
||||
)
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it("check if store can contain multiple clusters", () => {
|
||||
expect(clusterStore.hasClusters()).toBeTruthy();
|
||||
@ -104,42 +104,42 @@ describe("empty config", () => {
|
||||
expect(wsClusters.length).toBe(2);
|
||||
expect(wsClusters[0].id).toBe("prod");
|
||||
expect(wsClusters[1].id).toBe("dev");
|
||||
})
|
||||
});
|
||||
|
||||
it("check if cluster's kubeconfig file saved", () => {
|
||||
const file = ClusterStore.embedCustomKubeConfig("boo", "kubeconfig");
|
||||
expect(fs.readFileSync(file, "utf8")).toBe("kubeconfig");
|
||||
})
|
||||
});
|
||||
|
||||
it("check if reorderring works for same from and to", () => {
|
||||
clusterStore.swapIconOrders("workstation", 1, 1)
|
||||
clusterStore.swapIconOrders("workstation", 1, 1);
|
||||
|
||||
const clusters = clusterStore.getByWorkspaceId("workstation");
|
||||
expect(clusters[0].id).toBe("prod")
|
||||
expect(clusters[0].preferences.iconOrder).toBe(0)
|
||||
expect(clusters[1].id).toBe("dev")
|
||||
expect(clusters[1].preferences.iconOrder).toBe(1)
|
||||
})
|
||||
expect(clusters[0].id).toBe("prod");
|
||||
expect(clusters[0].preferences.iconOrder).toBe(0);
|
||||
expect(clusters[1].id).toBe("dev");
|
||||
expect(clusters[1].preferences.iconOrder).toBe(1);
|
||||
});
|
||||
|
||||
it("check if reorderring works for different from and to", () => {
|
||||
clusterStore.swapIconOrders("workstation", 0, 1)
|
||||
clusterStore.swapIconOrders("workstation", 0, 1);
|
||||
|
||||
const clusters = clusterStore.getByWorkspaceId("workstation");
|
||||
expect(clusters[0].id).toBe("dev")
|
||||
expect(clusters[0].preferences.iconOrder).toBe(0)
|
||||
expect(clusters[1].id).toBe("prod")
|
||||
expect(clusters[1].preferences.iconOrder).toBe(1)
|
||||
})
|
||||
expect(clusters[0].id).toBe("dev");
|
||||
expect(clusters[0].preferences.iconOrder).toBe(0);
|
||||
expect(clusters[1].id).toBe("prod");
|
||||
expect(clusters[1].preferences.iconOrder).toBe(1);
|
||||
});
|
||||
|
||||
it("check if after icon reordering, changing workspaces still works", () => {
|
||||
clusterStore.swapIconOrders("workstation", 1, 1)
|
||||
clusterStore.getById("prod").workspace = "default"
|
||||
clusterStore.swapIconOrders("workstation", 1, 1);
|
||||
clusterStore.getById("prod").workspace = "default";
|
||||
|
||||
expect(clusterStore.getByWorkspaceId("workstation").length).toBe(1);
|
||||
expect(clusterStore.getByWorkspaceId("default").length).toBe(1);
|
||||
})
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("config with existing clusters", () => {
|
||||
beforeEach(() => {
|
||||
@ -176,21 +176,21 @@ describe("config with existing clusters", () => {
|
||||
]
|
||||
})
|
||||
}
|
||||
}
|
||||
};
|
||||
mockFs(mockOpts);
|
||||
clusterStore = ClusterStore.getInstance<ClusterStore>();
|
||||
return clusterStore.load();
|
||||
})
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
mockFs.restore();
|
||||
})
|
||||
});
|
||||
|
||||
it("allows to retrieve a cluster", () => {
|
||||
const storedCluster = clusterStore.getById('cluster1');
|
||||
expect(storedCluster.id).toBe('cluster1');
|
||||
expect(storedCluster.preferences.terminalCWD).toBe('/foo');
|
||||
})
|
||||
});
|
||||
|
||||
it("allows to delete a cluster", () => {
|
||||
clusterStore.removeById('cluster2');
|
||||
@ -198,18 +198,18 @@ describe("config with existing clusters", () => {
|
||||
expect(storedCluster).toBeTruthy();
|
||||
const storedCluster2 = clusterStore.getById('cluster2');
|
||||
expect(storedCluster2).toBeUndefined();
|
||||
})
|
||||
});
|
||||
|
||||
it("allows getting all of the clusters", async () => {
|
||||
const storedClusters = clusterStore.clustersList;
|
||||
expect(storedClusters.length).toBe(3)
|
||||
expect(storedClusters[0].id).toBe('cluster1')
|
||||
expect(storedClusters[0].preferences.terminalCWD).toBe('/foo')
|
||||
expect(storedClusters[1].id).toBe('cluster2')
|
||||
expect(storedClusters[1].preferences.terminalCWD).toBe('/foo2')
|
||||
expect(storedClusters[2].id).toBe('cluster3')
|
||||
})
|
||||
})
|
||||
expect(storedClusters.length).toBe(3);
|
||||
expect(storedClusters[0].id).toBe('cluster1');
|
||||
expect(storedClusters[0].preferences.terminalCWD).toBe('/foo');
|
||||
expect(storedClusters[1].id).toBe('cluster2');
|
||||
expect(storedClusters[1].preferences.terminalCWD).toBe('/foo2');
|
||||
expect(storedClusters[2].id).toBe('cluster3');
|
||||
});
|
||||
});
|
||||
|
||||
describe("pre 2.0 config with an existing cluster", () => {
|
||||
beforeEach(() => {
|
||||
@ -229,17 +229,17 @@ describe("pre 2.0 config with an existing cluster", () => {
|
||||
mockFs(mockOpts);
|
||||
clusterStore = ClusterStore.getInstance<ClusterStore>();
|
||||
return clusterStore.load();
|
||||
})
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
mockFs.restore();
|
||||
})
|
||||
});
|
||||
|
||||
it("migrates to modern format with kubeconfig in a file", async () => {
|
||||
const config = clusterStore.clustersList[0].kubeConfigPath;
|
||||
expect(fs.readFileSync(config, "utf8")).toBe("kubeconfig content");
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
||||
|
||||
describe("pre 2.6.0 config with a cluster that has arrays in auth config", () => {
|
||||
beforeEach(() => {
|
||||
@ -257,15 +257,15 @@ describe("pre 2.6.0 config with a cluster that has arrays in auth config", () =>
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
};
|
||||
mockFs(mockOpts);
|
||||
clusterStore = ClusterStore.getInstance<ClusterStore>();
|
||||
return clusterStore.load();
|
||||
})
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
mockFs.restore();
|
||||
})
|
||||
});
|
||||
|
||||
it("replaces array format access token and expiry into string", async () => {
|
||||
const file = clusterStore.clustersList[0].kubeConfigPath;
|
||||
@ -273,8 +273,8 @@ describe("pre 2.6.0 config with a cluster that has arrays in auth config", () =>
|
||||
const kc = yaml.safeLoad(config);
|
||||
expect(kc.users[0].user['auth-provider'].config['access-token']).toBe("should be string");
|
||||
expect(kc.users[0].user['auth-provider'].config['expiry']).toBe("should be string");
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
||||
|
||||
describe("pre 2.6.0 config with a cluster icon", () => {
|
||||
beforeEach(() => {
|
||||
@ -297,23 +297,23 @@ describe("pre 2.6.0 config with a cluster icon", () => {
|
||||
}),
|
||||
"icon_path": testDataIcon,
|
||||
}
|
||||
}
|
||||
};
|
||||
mockFs(mockOpts);
|
||||
clusterStore = ClusterStore.getInstance<ClusterStore>();
|
||||
return clusterStore.load();
|
||||
})
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
mockFs.restore();
|
||||
})
|
||||
});
|
||||
|
||||
it("moves the icon into preferences", async () => {
|
||||
const storedClusterData = clusterStore.clustersList[0];
|
||||
expect(storedClusterData.hasOwnProperty('icon')).toBe(false);
|
||||
expect(storedClusterData.preferences.hasOwnProperty('icon')).toBe(true);
|
||||
expect(storedClusterData.preferences.icon.startsWith("data:;base64,")).toBe(true);
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
||||
|
||||
describe("for a pre 2.7.0-beta.0 config without a workspace", () => {
|
||||
beforeEach(() => {
|
||||
@ -334,21 +334,21 @@ describe("for a pre 2.7.0-beta.0 config without a workspace", () => {
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
};
|
||||
mockFs(mockOpts);
|
||||
clusterStore = ClusterStore.getInstance<ClusterStore>();
|
||||
return clusterStore.load();
|
||||
})
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
mockFs.restore();
|
||||
})
|
||||
});
|
||||
|
||||
it("adds cluster to default workspace", async () => {
|
||||
const storedClusterData = clusterStore.clustersList[0];
|
||||
expect(storedClusterData.workspace).toBe('default');
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
||||
|
||||
describe("pre 3.6.0-beta.1 config with an existing cluster", () => {
|
||||
beforeEach(() => {
|
||||
@ -378,19 +378,19 @@ describe("pre 3.6.0-beta.1 config with an existing cluster", () => {
|
||||
mockFs(mockOpts);
|
||||
clusterStore = ClusterStore.getInstance<ClusterStore>();
|
||||
return clusterStore.load();
|
||||
})
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
mockFs.restore();
|
||||
})
|
||||
});
|
||||
|
||||
it("migrates to modern format with kubeconfig in a file", async () => {
|
||||
const config = clusterStore.clustersList[0].kubeConfigPath;
|
||||
expect(fs.readFileSync(config, "utf8")).toBe("kubeconfig content");
|
||||
})
|
||||
});
|
||||
|
||||
it("migrates to modern format with icon not in file", async () => {
|
||||
const { icon } = clusterStore.clustersList[0].preferences;
|
||||
expect(icon.startsWith("data:;base64,")).toBe(true);
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
||||
|
||||
@ -1,15 +1,15 @@
|
||||
import { appEventBus, AppEvent } from "../event-bus"
|
||||
import { appEventBus, AppEvent } from "../event-bus";
|
||||
|
||||
describe("event bus tests", () => {
|
||||
describe("emit", () => {
|
||||
it("emits an event", () => {
|
||||
let event: AppEvent = null
|
||||
let event: AppEvent = null;
|
||||
appEventBus.addListener((data) => {
|
||||
event = data
|
||||
})
|
||||
event = data;
|
||||
});
|
||||
|
||||
appEventBus.emit({name: "foo", action: "bar"})
|
||||
expect(event.name).toBe("foo")
|
||||
})
|
||||
})
|
||||
})
|
||||
appEventBus.emit({name: "foo", action: "bar"});
|
||||
expect(event.name).toBe("foo");
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@ -2,7 +2,7 @@
|
||||
* @jest-environment jsdom
|
||||
*/
|
||||
|
||||
import { SearchStore } from "../search-store"
|
||||
import { SearchStore } from "../search-store";
|
||||
|
||||
let searchStore: SearchStore = null;
|
||||
|
||||
@ -10,17 +10,17 @@ const logs = [
|
||||
"1:M 30 Oct 2020 16:17:41.553 # Connection with replica 172.17.0.12:6379 lost",
|
||||
"1:M 30 Oct 2020 16:17:41.623 * Replica 172.17.0.12:6379 asks for synchronization",
|
||||
"1:M 30 Oct 2020 16:17:41.623 * Starting Partial resynchronization request from 172.17.0.12:6379 accepted. Sending 0 bytes of backlog starting from offset 14407."
|
||||
]
|
||||
];
|
||||
|
||||
describe("search store tests", () => {
|
||||
beforeEach(async () => {
|
||||
searchStore = new SearchStore();
|
||||
})
|
||||
});
|
||||
|
||||
it("does nothing with empty search query", () => {
|
||||
searchStore.onSearch([], "");
|
||||
expect(searchStore.occurrences).toEqual([]);
|
||||
})
|
||||
});
|
||||
|
||||
it("doesn't break if no text provided", () => {
|
||||
searchStore.onSearch(null, "replica");
|
||||
@ -28,53 +28,53 @@ describe("search store tests", () => {
|
||||
|
||||
searchStore.onSearch([], "replica");
|
||||
expect(searchStore.occurrences).toEqual([]);
|
||||
})
|
||||
});
|
||||
|
||||
it("find 3 occurences across 3 lines", () => {
|
||||
searchStore.onSearch(logs, "172");
|
||||
expect(searchStore.occurrences).toEqual([0, 1, 2]);
|
||||
})
|
||||
});
|
||||
|
||||
it("find occurences within 1 line (case-insensitive)", () => {
|
||||
searchStore.onSearch(logs, "Starting");
|
||||
expect(searchStore.occurrences).toEqual([2, 2]);
|
||||
})
|
||||
});
|
||||
|
||||
it("sets overlay index equal to first occurence", () => {
|
||||
searchStore.onSearch(logs, "Replica");
|
||||
expect(searchStore.activeOverlayIndex).toBe(0);
|
||||
})
|
||||
});
|
||||
|
||||
it("set overlay index to next occurence", () => {
|
||||
searchStore.onSearch(logs, "172");
|
||||
searchStore.setNextOverlayActive();
|
||||
expect(searchStore.activeOverlayIndex).toBe(1);
|
||||
})
|
||||
});
|
||||
|
||||
it("sets overlay to last occurence", () => {
|
||||
searchStore.onSearch(logs, "172");
|
||||
searchStore.setPrevOverlayActive();
|
||||
expect(searchStore.activeOverlayIndex).toBe(2);
|
||||
})
|
||||
});
|
||||
|
||||
it("gets line index where overlay is located", () => {
|
||||
searchStore.onSearch(logs, "synchronization");
|
||||
expect(searchStore.activeOverlayLine).toBe(1);
|
||||
})
|
||||
});
|
||||
|
||||
it("escapes string for using in regex", () => {
|
||||
const regex = searchStore.escapeRegex("some.interesting-query\\#?()[]");
|
||||
expect(regex).toBe("some\\.interesting\\-query\\\\\\#\\?\\(\\)\\[\\]");
|
||||
})
|
||||
});
|
||||
|
||||
it("gets active find number", () => {
|
||||
searchStore.onSearch(logs, "172");
|
||||
searchStore.setNextOverlayActive();
|
||||
expect(searchStore.activeFind).toBe(2);
|
||||
})
|
||||
});
|
||||
|
||||
it("gets total finds number", () => {
|
||||
searchStore.onSearch(logs, "Starting");
|
||||
expect(searchStore.totalFinds).toBe(2);
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
||||
@ -1,4 +1,4 @@
|
||||
import mockFs from "mock-fs"
|
||||
import mockFs from "mock-fs";
|
||||
|
||||
jest.mock("electron", () => {
|
||||
return {
|
||||
@ -7,55 +7,55 @@ jest.mock("electron", () => {
|
||||
getPath: () => 'tmp',
|
||||
getLocale: () => 'en'
|
||||
}
|
||||
}
|
||||
})
|
||||
};
|
||||
});
|
||||
|
||||
import { UserStore } from "../user-store"
|
||||
import { SemVer } from "semver"
|
||||
import electron from "electron"
|
||||
import { UserStore } from "../user-store";
|
||||
import { SemVer } from "semver";
|
||||
import electron from "electron";
|
||||
|
||||
describe("user store tests", () => {
|
||||
describe("for an empty config", () => {
|
||||
beforeEach(() => {
|
||||
UserStore.resetInstance()
|
||||
mockFs({ tmp: { 'config.json': "{}" } })
|
||||
})
|
||||
UserStore.resetInstance();
|
||||
mockFs({ tmp: { 'config.json': "{}" } });
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
mockFs.restore()
|
||||
})
|
||||
mockFs.restore();
|
||||
});
|
||||
|
||||
it("allows setting and retrieving lastSeenAppVersion", () => {
|
||||
const us = UserStore.getInstance<UserStore>();
|
||||
|
||||
us.lastSeenAppVersion = "1.2.3";
|
||||
expect(us.lastSeenAppVersion).toBe("1.2.3");
|
||||
})
|
||||
});
|
||||
|
||||
it("allows adding and listing seen contexts", () => {
|
||||
const us = UserStore.getInstance<UserStore>();
|
||||
|
||||
us.seenContexts.add('foo')
|
||||
expect(us.seenContexts.size).toBe(1)
|
||||
us.seenContexts.add('foo');
|
||||
expect(us.seenContexts.size).toBe(1);
|
||||
|
||||
us.seenContexts.add('foo')
|
||||
us.seenContexts.add('bar')
|
||||
expect(us.seenContexts.size).toBe(2) // check 'foo' isn't added twice
|
||||
expect(us.seenContexts.has('foo')).toBe(true)
|
||||
expect(us.seenContexts.has('bar')).toBe(true)
|
||||
})
|
||||
us.seenContexts.add('foo');
|
||||
us.seenContexts.add('bar');
|
||||
expect(us.seenContexts.size).toBe(2); // check 'foo' isn't added twice
|
||||
expect(us.seenContexts.has('foo')).toBe(true);
|
||||
expect(us.seenContexts.has('bar')).toBe(true);
|
||||
});
|
||||
|
||||
it("allows setting and getting preferences", () => {
|
||||
const us = UserStore.getInstance<UserStore>();
|
||||
|
||||
us.preferences.httpsProxy = 'abcd://defg';
|
||||
|
||||
expect(us.preferences.httpsProxy).toBe('abcd://defg')
|
||||
expect(us.preferences.colorTheme).toBe(UserStore.defaultTheme)
|
||||
expect(us.preferences.httpsProxy).toBe('abcd://defg');
|
||||
expect(us.preferences.colorTheme).toBe(UserStore.defaultTheme);
|
||||
|
||||
us.preferences.colorTheme = "light";
|
||||
expect(us.preferences.colorTheme).toBe('light')
|
||||
})
|
||||
expect(us.preferences.colorTheme).toBe('light');
|
||||
});
|
||||
|
||||
it("correctly resets theme to default value", async () => {
|
||||
const us = UserStore.getInstance<UserStore>();
|
||||
@ -64,7 +64,7 @@ describe("user store tests", () => {
|
||||
us.preferences.colorTheme = "some other theme";
|
||||
await us.resetTheme();
|
||||
expect(us.preferences.colorTheme).toBe(UserStore.defaultTheme);
|
||||
})
|
||||
});
|
||||
|
||||
it("correctly calculates if the last seen version is an old release", () => {
|
||||
const us = UserStore.getInstance<UserStore>();
|
||||
@ -73,12 +73,12 @@ describe("user store tests", () => {
|
||||
|
||||
us.lastSeenAppVersion = (new SemVer(electron.app.getVersion())).inc("major").format();
|
||||
expect(us.isNewVersion).toBe(false);
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
||||
|
||||
describe("migrations", () => {
|
||||
beforeEach(() => {
|
||||
UserStore.resetInstance()
|
||||
UserStore.resetInstance();
|
||||
mockFs({
|
||||
'tmp': {
|
||||
'config.json': JSON.stringify({
|
||||
@ -87,17 +87,17 @@ describe("user store tests", () => {
|
||||
lastSeenAppVersion: '1.2.3'
|
||||
})
|
||||
}
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
mockFs.restore()
|
||||
})
|
||||
mockFs.restore();
|
||||
});
|
||||
|
||||
it("sets last seen app version to 0.0.0", () => {
|
||||
const us = UserStore.getInstance<UserStore>();
|
||||
|
||||
expect(us.lastSeenAppVersion).toBe('0.0.0')
|
||||
})
|
||||
})
|
||||
})
|
||||
expect(us.lastSeenAppVersion).toBe('0.0.0');
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -1,4 +1,4 @@
|
||||
import mockFs from "mock-fs"
|
||||
import mockFs from "mock-fs";
|
||||
|
||||
jest.mock("electron", () => {
|
||||
return {
|
||||
@ -7,36 +7,36 @@ jest.mock("electron", () => {
|
||||
getPath: () => 'tmp',
|
||||
getLocale: () => 'en'
|
||||
}
|
||||
}
|
||||
})
|
||||
};
|
||||
});
|
||||
|
||||
import { Workspace, WorkspaceStore } from "../workspace-store"
|
||||
import { Workspace, WorkspaceStore } from "../workspace-store";
|
||||
|
||||
describe("workspace store tests", () => {
|
||||
describe("for an empty config", () => {
|
||||
beforeEach(async () => {
|
||||
WorkspaceStore.resetInstance()
|
||||
mockFs({ tmp: { 'lens-workspace-store.json': "{}" } })
|
||||
WorkspaceStore.resetInstance();
|
||||
mockFs({ tmp: { 'lens-workspace-store.json': "{}" } });
|
||||
|
||||
await WorkspaceStore.getInstance<WorkspaceStore>().load();
|
||||
})
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
mockFs.restore()
|
||||
})
|
||||
mockFs.restore();
|
||||
});
|
||||
|
||||
it("default workspace should always exist", () => {
|
||||
const ws = WorkspaceStore.getInstance<WorkspaceStore>();
|
||||
|
||||
expect(ws.workspaces.size).toBe(1);
|
||||
expect(ws.getById(WorkspaceStore.defaultId)).not.toBe(null);
|
||||
})
|
||||
});
|
||||
|
||||
it("cannot remove the default workspace", () => {
|
||||
const ws = WorkspaceStore.getInstance<WorkspaceStore>();
|
||||
|
||||
expect(() => ws.removeWorkspaceById(WorkspaceStore.defaultId)).toThrowError("Cannot remove");
|
||||
})
|
||||
});
|
||||
|
||||
it("can update workspace description", () => {
|
||||
const ws = WorkspaceStore.getInstance<WorkspaceStore>();
|
||||
@ -50,7 +50,7 @@ describe("workspace store tests", () => {
|
||||
ws.updateWorkspace(workspace);
|
||||
|
||||
expect(ws.getById("foobar").description).toBe("Foobar description");
|
||||
})
|
||||
});
|
||||
|
||||
it("can add workspaces", () => {
|
||||
const ws = WorkspaceStore.getInstance<WorkspaceStore>();
|
||||
@ -61,13 +61,13 @@ describe("workspace store tests", () => {
|
||||
}));
|
||||
|
||||
expect(ws.getById("123").name).toBe("foobar");
|
||||
})
|
||||
});
|
||||
|
||||
it("cannot set a non-existent workspace to be active", () => {
|
||||
const ws = WorkspaceStore.getInstance<WorkspaceStore>();
|
||||
|
||||
expect(() => ws.setActive("abc")).toThrow("doesn't exist");
|
||||
})
|
||||
});
|
||||
|
||||
it("can set a existent workspace to be active", () => {
|
||||
const ws = WorkspaceStore.getInstance<WorkspaceStore>();
|
||||
@ -78,7 +78,7 @@ describe("workspace store tests", () => {
|
||||
}));
|
||||
|
||||
expect(() => ws.setActive("abc")).not.toThrowError();
|
||||
})
|
||||
});
|
||||
|
||||
it("can remove a workspace", () => {
|
||||
const ws = WorkspaceStore.getInstance<WorkspaceStore>();
|
||||
@ -94,7 +94,7 @@ describe("workspace store tests", () => {
|
||||
ws.removeWorkspaceById("123");
|
||||
|
||||
expect(ws.workspaces.size).toBe(2);
|
||||
})
|
||||
});
|
||||
|
||||
it("cannot create workspace with existent name", () => {
|
||||
const ws = WorkspaceStore.getInstance<WorkspaceStore>();
|
||||
@ -105,7 +105,7 @@ describe("workspace store tests", () => {
|
||||
}));
|
||||
|
||||
expect(ws.workspacesList.length).toBe(1); // default workspace only
|
||||
})
|
||||
});
|
||||
|
||||
it("cannot create workspace with empty name", () => {
|
||||
const ws = WorkspaceStore.getInstance<WorkspaceStore>();
|
||||
@ -116,7 +116,7 @@ describe("workspace store tests", () => {
|
||||
}));
|
||||
|
||||
expect(ws.workspacesList.length).toBe(1); // default workspace only
|
||||
})
|
||||
});
|
||||
|
||||
it("cannot create workspace with ' ' name", () => {
|
||||
const ws = WorkspaceStore.getInstance<WorkspaceStore>();
|
||||
@ -127,7 +127,7 @@ describe("workspace store tests", () => {
|
||||
}));
|
||||
|
||||
expect(ws.workspacesList.length).toBe(1); // default workspace only
|
||||
})
|
||||
});
|
||||
|
||||
it("trim workspace name", () => {
|
||||
const ws = WorkspaceStore.getInstance<WorkspaceStore>();
|
||||
@ -138,12 +138,12 @@ describe("workspace store tests", () => {
|
||||
}));
|
||||
|
||||
expect(ws.workspacesList.length).toBe(1); // default workspace only
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
||||
|
||||
describe("for a non-empty config", () => {
|
||||
beforeEach(async () => {
|
||||
WorkspaceStore.resetInstance()
|
||||
WorkspaceStore.resetInstance();
|
||||
mockFs({
|
||||
tmp: {
|
||||
'lens-workspace-store.json': JSON.stringify({
|
||||
@ -157,19 +157,19 @@ describe("workspace store tests", () => {
|
||||
}]
|
||||
})
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
await WorkspaceStore.getInstance<WorkspaceStore>().load();
|
||||
})
|
||||
});
|
||||
|
||||
afterEach(() => {
|
||||
mockFs.restore()
|
||||
})
|
||||
mockFs.restore();
|
||||
});
|
||||
|
||||
it("doesn't revert to default workspace", async () => {
|
||||
const ws = WorkspaceStore.getInstance<WorkspaceStore>();
|
||||
|
||||
expect(ws.currentWorkspaceId).toBe("abc");
|
||||
})
|
||||
})
|
||||
})
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@ -1,7 +1,7 @@
|
||||
import path from "path"
|
||||
import Config from "conf"
|
||||
import { Options as ConfOptions } from "conf/dist/source/types"
|
||||
import { app, ipcMain, IpcMainEvent, ipcRenderer, IpcRendererEvent, remote } from "electron"
|
||||
import path from "path";
|
||||
import Config from "conf";
|
||||
import { Options as ConfOptions } from "conf/dist/source/types";
|
||||
import { app, ipcMain, IpcMainEvent, ipcRenderer, IpcRendererEvent, remote } from "electron";
|
||||
import { action, IReactionOptions, observable, reaction, runInAction, toJS, when } from "mobx";
|
||||
import Singleton from "./utils/singleton";
|
||||
import { getAppVersion } from "./utils/app-version";
|
||||
@ -32,7 +32,7 @@ export abstract class BaseStore<T = any> extends Singleton {
|
||||
autoLoad: false,
|
||||
syncEnabled: true,
|
||||
...params,
|
||||
}
|
||||
};
|
||||
this.init();
|
||||
}
|
||||
|
||||
@ -41,11 +41,11 @@ export abstract class BaseStore<T = any> extends Singleton {
|
||||
}
|
||||
|
||||
protected get syncRendererChannel() {
|
||||
return `store-sync-renderer:${this.path}`
|
||||
return `store-sync-renderer:${this.path}`;
|
||||
}
|
||||
|
||||
protected get syncMainChannel() {
|
||||
return `store-sync-main:${this.path}`
|
||||
return `store-sync-main:${this.path}`;
|
||||
}
|
||||
|
||||
get path() {
|
||||
@ -76,7 +76,7 @@ export abstract class BaseStore<T = any> extends Singleton {
|
||||
}
|
||||
|
||||
protected cwd() {
|
||||
return (app || remote.app).getPath("userData")
|
||||
return (app || remote.app).getPath("userData");
|
||||
}
|
||||
|
||||
protected async saveToFile(model: T) {
|
||||
@ -96,7 +96,7 @@ export abstract class BaseStore<T = any> extends Singleton {
|
||||
logger.silly(`[STORE]: SYNC ${this.name} from renderer`, { model });
|
||||
this.onSync(model);
|
||||
};
|
||||
subscribeToBroadcast(this.syncMainChannel, callback)
|
||||
subscribeToBroadcast(this.syncMainChannel, callback);
|
||||
this.syncDisposers.push(() => unsubscribeFromBroadcast(this.syncMainChannel, callback));
|
||||
}
|
||||
if (ipcRenderer) {
|
||||
@ -104,20 +104,20 @@ export abstract class BaseStore<T = any> extends Singleton {
|
||||
logger.silly(`[STORE]: SYNC ${this.name} from main`, { model });
|
||||
this.onSyncFromMain(model);
|
||||
};
|
||||
subscribeToBroadcast(this.syncRendererChannel, callback)
|
||||
subscribeToBroadcast(this.syncRendererChannel, callback);
|
||||
this.syncDisposers.push(() => unsubscribeFromBroadcast(this.syncRendererChannel, callback));
|
||||
}
|
||||
}
|
||||
|
||||
protected onSyncFromMain(model: T) {
|
||||
this.applyWithoutSync(() => {
|
||||
this.onSync(model)
|
||||
})
|
||||
this.onSync(model);
|
||||
});
|
||||
}
|
||||
|
||||
unregisterIpcListener() {
|
||||
ipcRenderer.removeAllListeners(this.syncMainChannel)
|
||||
ipcRenderer.removeAllListeners(this.syncRendererChannel)
|
||||
ipcRenderer.removeAllListeners(this.syncMainChannel);
|
||||
ipcRenderer.removeAllListeners(this.syncRendererChannel);
|
||||
}
|
||||
|
||||
disableSync() {
|
||||
@ -143,9 +143,9 @@ export abstract class BaseStore<T = any> extends Singleton {
|
||||
protected async onModelChange(model: T) {
|
||||
if (ipcMain) {
|
||||
this.saveToFile(model); // save config file
|
||||
broadcastMessage(this.syncRendererChannel, model)
|
||||
broadcastMessage(this.syncRendererChannel, model);
|
||||
} else {
|
||||
broadcastMessage(this.syncMainChannel, model)
|
||||
broadcastMessage(this.syncMainChannel, model);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -1,3 +1,3 @@
|
||||
import { observable } from "mobx"
|
||||
import { observable } from "mobx";
|
||||
|
||||
export const clusterFrameMap = observable.map<string, number>();
|
||||
|
||||
@ -1,15 +1,15 @@
|
||||
import { handleRequest } from "./ipc";
|
||||
import { ClusterId, clusterStore } from "./cluster-store";
|
||||
import { appEventBus } from "./event-bus"
|
||||
import { appEventBus } from "./event-bus";
|
||||
import { ResourceApplier } from "../main/resource-applier";
|
||||
import { ipcMain } from "electron";
|
||||
import { clusterFrameMap } from "./cluster-frames"
|
||||
import { clusterFrameMap } from "./cluster-frames";
|
||||
|
||||
export const clusterActivateHandler = "cluster:activate"
|
||||
export const clusterSetFrameIdHandler = "cluster:set-frame-id"
|
||||
export const clusterRefreshHandler = "cluster:refresh"
|
||||
export const clusterDisconnectHandler = "cluster:disconnect"
|
||||
export const clusterKubectlApplyAllHandler = "cluster:kubectl-apply-all"
|
||||
export const clusterActivateHandler = "cluster:activate";
|
||||
export const clusterSetFrameIdHandler = "cluster:set-frame-id";
|
||||
export const clusterRefreshHandler = "cluster:refresh";
|
||||
export const clusterDisconnectHandler = "cluster:disconnect";
|
||||
export const clusterKubectlApplyAllHandler = "cluster:kubectl-apply-all";
|
||||
|
||||
|
||||
if (ipcMain) {
|
||||
@ -18,38 +18,38 @@ if (ipcMain) {
|
||||
if (cluster) {
|
||||
return cluster.activate(force);
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
handleRequest(clusterSetFrameIdHandler, (event, clusterId: ClusterId, frameId: number) => {
|
||||
const cluster = clusterStore.getById(clusterId);
|
||||
if (cluster) {
|
||||
clusterFrameMap.set(cluster.id, frameId)
|
||||
clusterFrameMap.set(cluster.id, frameId);
|
||||
return cluster.pushState();
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
handleRequest(clusterRefreshHandler, (event, clusterId: ClusterId) => {
|
||||
const cluster = clusterStore.getById(clusterId);
|
||||
if (cluster) return cluster.refresh({ refreshMetadata: true })
|
||||
})
|
||||
if (cluster) return cluster.refresh({ refreshMetadata: true });
|
||||
});
|
||||
|
||||
handleRequest(clusterDisconnectHandler, (event, clusterId: ClusterId) => {
|
||||
appEventBus.emit({name: "cluster", action: "stop"});
|
||||
const cluster = clusterStore.getById(clusterId);
|
||||
if (cluster) {
|
||||
cluster.disconnect();
|
||||
clusterFrameMap.delete(cluster.id)
|
||||
clusterFrameMap.delete(cluster.id);
|
||||
}
|
||||
})
|
||||
});
|
||||
|
||||
handleRequest(clusterKubectlApplyAllHandler, (event, clusterId: ClusterId, resources: string[]) => {
|
||||
appEventBus.emit({name: "cluster", action: "kubectl-apply-all"})
|
||||
appEventBus.emit({name: "cluster", action: "kubectl-apply-all"});
|
||||
const cluster = clusterStore.getById(clusterId);
|
||||
if (cluster) {
|
||||
const applier = new ResourceApplier(cluster)
|
||||
applier.kubectlApplyAll(resources)
|
||||
const applier = new ResourceApplier(cluster);
|
||||
applier.kubectlApplyAll(resources);
|
||||
} else {
|
||||
throw `${clusterId} is not a valid cluster id`;
|
||||
}
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
@ -5,9 +5,9 @@ import { unlink } from "fs-extra";
|
||||
import { action, computed, observable, reaction, toJS } from "mobx";
|
||||
import { BaseStore } from "./base-store";
|
||||
import { Cluster, ClusterState } from "../main/cluster";
|
||||
import migrations from "../migrations/cluster-store"
|
||||
import migrations from "../migrations/cluster-store";
|
||||
import logger from "../main/logger";
|
||||
import { appEventBus } from "./event-bus"
|
||||
import { appEventBus } from "./event-bus";
|
||||
import { dumpConfigYaml } from "./kube-helpers";
|
||||
import { saveToAppFiles } from "./utils/saveToAppFiles";
|
||||
import { KubeConfig } from "@kubernetes/client-node";
|
||||
@ -86,38 +86,38 @@ export class ClusterStore extends BaseStore<ClusterStoreModel> {
|
||||
migrations: migrations,
|
||||
});
|
||||
|
||||
this.pushStateToViewsAutomatically()
|
||||
this.pushStateToViewsAutomatically();
|
||||
}
|
||||
|
||||
protected pushStateToViewsAutomatically() {
|
||||
if (!ipcRenderer) {
|
||||
reaction(() => this.connectedClustersList, () => {
|
||||
this.pushState()
|
||||
})
|
||||
this.pushState();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
registerIpcListener() {
|
||||
logger.info(`[CLUSTER-STORE] start to listen (${webFrame.routingId})`)
|
||||
logger.info(`[CLUSTER-STORE] start to listen (${webFrame.routingId})`);
|
||||
subscribeToBroadcast("cluster:state", (event, clusterId: string, state: ClusterState) => {
|
||||
logger.silly(`[CLUSTER-STORE]: received push-state at ${location.host} (${webFrame.routingId})`, clusterId, state);
|
||||
this.getById(clusterId)?.setState(state)
|
||||
})
|
||||
this.getById(clusterId)?.setState(state);
|
||||
});
|
||||
}
|
||||
|
||||
unregisterIpcListener() {
|
||||
super.unregisterIpcListener()
|
||||
unsubscribeAllFromBroadcast("cluster:state")
|
||||
super.unregisterIpcListener();
|
||||
unsubscribeAllFromBroadcast("cluster:state");
|
||||
}
|
||||
|
||||
pushState() {
|
||||
this.clusters.forEach((c) => {
|
||||
c.pushState()
|
||||
})
|
||||
c.pushState();
|
||||
});
|
||||
}
|
||||
|
||||
get activeClusterId() {
|
||||
return this.activeCluster
|
||||
return this.activeCluster;
|
||||
}
|
||||
|
||||
@computed get clustersList(): Cluster[] {
|
||||
@ -125,7 +125,7 @@ export class ClusterStore extends BaseStore<ClusterStoreModel> {
|
||||
}
|
||||
|
||||
@computed get enabledClustersList(): Cluster[] {
|
||||
return this.clustersList.filter((c) => c.enabled)
|
||||
return this.clustersList.filter((c) => c.enabled);
|
||||
}
|
||||
|
||||
@computed get active(): Cluster | null {
|
||||
@ -133,7 +133,7 @@ export class ClusterStore extends BaseStore<ClusterStoreModel> {
|
||||
}
|
||||
|
||||
@computed get connectedClustersList(): Cluster[] {
|
||||
return this.clustersList.filter((c) => !c.disconnected)
|
||||
return this.clustersList.filter((c) => !c.disconnected);
|
||||
}
|
||||
|
||||
isActive(id: ClusterId) {
|
||||
@ -149,7 +149,7 @@ export class ClusterStore extends BaseStore<ClusterStoreModel> {
|
||||
swapIconOrders(workspace: WorkspaceId, from: number, to: number) {
|
||||
const clusters = this.getByWorkspaceId(workspace);
|
||||
if (from < 0 || to < 0 || from >= clusters.length || to >= clusters.length || isNaN(from) || isNaN(to)) {
|
||||
throw new Error(`invalid from<->to arguments`)
|
||||
throw new Error(`invalid from<->to arguments`);
|
||||
}
|
||||
|
||||
move.mutate(clusters, from, to);
|
||||
@ -170,37 +170,37 @@ export class ClusterStore extends BaseStore<ClusterStoreModel> {
|
||||
getByWorkspaceId(workspaceId: string): Cluster[] {
|
||||
const clusters = Array.from(this.clusters.values())
|
||||
.filter(cluster => cluster.workspace === workspaceId);
|
||||
return _.sortBy(clusters, cluster => cluster.preferences.iconOrder)
|
||||
return _.sortBy(clusters, cluster => cluster.preferences.iconOrder);
|
||||
}
|
||||
|
||||
@action
|
||||
addClusters(...models: ClusterModel[]): Cluster[] {
|
||||
const clusters: Cluster[] = []
|
||||
const clusters: Cluster[] = [];
|
||||
models.forEach(model => {
|
||||
clusters.push(this.addCluster(model))
|
||||
})
|
||||
clusters.push(this.addCluster(model));
|
||||
});
|
||||
|
||||
return clusters
|
||||
return clusters;
|
||||
}
|
||||
|
||||
@action
|
||||
addCluster(model: ClusterModel | Cluster): Cluster {
|
||||
appEventBus.emit({ name: "cluster", action: "add" })
|
||||
appEventBus.emit({ name: "cluster", action: "add" });
|
||||
let cluster = model as Cluster;
|
||||
if (!(model instanceof Cluster)) {
|
||||
cluster = new Cluster(model)
|
||||
cluster = new Cluster(model);
|
||||
}
|
||||
this.clusters.set(model.id, cluster);
|
||||
return cluster
|
||||
return cluster;
|
||||
}
|
||||
|
||||
async removeCluster(model: ClusterModel) {
|
||||
await this.removeById(model.id)
|
||||
await this.removeById(model.id);
|
||||
}
|
||||
|
||||
@action
|
||||
async removeById(clusterId: ClusterId) {
|
||||
appEventBus.emit({ name: "cluster", action: "remove" })
|
||||
appEventBus.emit({ name: "cluster", action: "remove" });
|
||||
const cluster = this.getById(clusterId);
|
||||
if (cluster) {
|
||||
this.clusters.delete(clusterId);
|
||||
@ -217,8 +217,8 @@ export class ClusterStore extends BaseStore<ClusterStoreModel> {
|
||||
@action
|
||||
removeByWorkspaceId(workspaceId: string) {
|
||||
this.getByWorkspaceId(workspaceId).forEach(cluster => {
|
||||
this.removeById(cluster.id)
|
||||
})
|
||||
this.removeById(cluster.id);
|
||||
});
|
||||
}
|
||||
|
||||
@action
|
||||
@ -235,7 +235,7 @@ export class ClusterStore extends BaseStore<ClusterStoreModel> {
|
||||
} else {
|
||||
cluster = new Cluster(clusterModel);
|
||||
if (!cluster.isManaged) {
|
||||
cluster.enabled = true
|
||||
cluster.enabled = true;
|
||||
}
|
||||
}
|
||||
newClusters.set(clusterModel.id, cluster);
|
||||
@ -259,7 +259,7 @@ export class ClusterStore extends BaseStore<ClusterStoreModel> {
|
||||
clusters: this.clustersList.map(cluster => cluster.toJSON()),
|
||||
}, {
|
||||
recurseEverything: true
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
import { EventEmitter } from "./event-emitter"
|
||||
import { EventEmitter } from "./event-emitter";
|
||||
|
||||
export type AppEvent = {
|
||||
name: string;
|
||||
@ -6,4 +6,4 @@ export type AppEvent = {
|
||||
params?: object;
|
||||
}
|
||||
|
||||
export const appEventBus = new EventEmitter<[AppEvent]>()
|
||||
export const appEventBus = new EventEmitter<[AppEvent]>();
|
||||
|
||||
@ -35,6 +35,6 @@ export class EventEmitter<D extends [...any[]]> {
|
||||
const result = callback(...data);
|
||||
if (result === false) return; // break cycle
|
||||
return true;
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
@ -7,24 +7,24 @@ import logger from "../main/logger";
|
||||
import { clusterFrameMap } from "./cluster-frames";
|
||||
|
||||
export function handleRequest(channel: string, listener: (...args: any[]) => any) {
|
||||
ipcMain.handle(channel, listener)
|
||||
ipcMain.handle(channel, listener);
|
||||
}
|
||||
|
||||
export async function requestMain(channel: string, ...args: any[]) {
|
||||
return ipcRenderer.invoke(channel, ...args)
|
||||
return ipcRenderer.invoke(channel, ...args);
|
||||
}
|
||||
|
||||
async function getSubFrames(): Promise<number[]> {
|
||||
const subFrames: number[] = [];
|
||||
clusterFrameMap.forEach((frameId, _) => {
|
||||
subFrames.push(frameId)
|
||||
subFrames.push(frameId);
|
||||
});
|
||||
return subFrames;
|
||||
}
|
||||
|
||||
export function broadcastMessage(channel: string, ...args: any[]) {
|
||||
const views = (webContents || remote?.webContents)?.getAllWebContents();
|
||||
if (!views) return
|
||||
if (!views) return;
|
||||
|
||||
views.forEach(webContent => {
|
||||
const type = webContent.getType();
|
||||
@ -32,39 +32,39 @@ export function broadcastMessage(channel: string, ...args: any[]) {
|
||||
webContent.send(channel, ...args);
|
||||
getSubFrames().then((frames) => {
|
||||
frames.map((frameId) => {
|
||||
webContent.sendToFrame(frameId, channel, ...args)
|
||||
})
|
||||
}).catch((e) => e)
|
||||
})
|
||||
webContent.sendToFrame(frameId, channel, ...args);
|
||||
});
|
||||
}).catch((e) => e);
|
||||
});
|
||||
if (ipcRenderer) {
|
||||
ipcRenderer.send(channel, ...args)
|
||||
ipcRenderer.send(channel, ...args);
|
||||
} else {
|
||||
ipcMain.emit(channel, ...args)
|
||||
ipcMain.emit(channel, ...args);
|
||||
}
|
||||
}
|
||||
|
||||
export function subscribeToBroadcast(channel: string, listener: (...args: any[]) => any) {
|
||||
if (ipcRenderer) {
|
||||
ipcRenderer.on(channel, listener)
|
||||
ipcRenderer.on(channel, listener);
|
||||
} else {
|
||||
ipcMain.on(channel, listener)
|
||||
ipcMain.on(channel, listener);
|
||||
}
|
||||
|
||||
return listener
|
||||
return listener;
|
||||
}
|
||||
|
||||
export function unsubscribeFromBroadcast(channel: string, listener: (...args: any[]) => any) {
|
||||
if (ipcRenderer) {
|
||||
ipcRenderer.off(channel, listener)
|
||||
ipcRenderer.off(channel, listener);
|
||||
} else {
|
||||
ipcMain.off(channel, listener)
|
||||
ipcMain.off(channel, listener);
|
||||
}
|
||||
}
|
||||
|
||||
export function unsubscribeAllFromBroadcast(channel: string) {
|
||||
if (ipcRenderer) {
|
||||
ipcRenderer.removeAllListeners(channel)
|
||||
ipcRenderer.removeAllListeners(channel);
|
||||
} else {
|
||||
ipcMain.removeAllListeners(channel)
|
||||
ipcMain.removeAllListeners(channel);
|
||||
}
|
||||
}
|
||||
|
||||
@ -1,8 +1,8 @@
|
||||
import { KubeConfig, V1Node, V1Pod } from "@kubernetes/client-node"
|
||||
import { KubeConfig, V1Node, V1Pod } from "@kubernetes/client-node";
|
||||
import fse from "fs-extra";
|
||||
import path from "path"
|
||||
import os from "os"
|
||||
import yaml from "js-yaml"
|
||||
import path from "path";
|
||||
import os from "os";
|
||||
import yaml from "js-yaml";
|
||||
import logger from "../main/logger";
|
||||
import commandExists from "command-exists";
|
||||
import { ExecValidationNotFoundError } from "./custom-errors";
|
||||
@ -25,7 +25,7 @@ export function loadConfig(pathOrContent?: string): KubeConfig {
|
||||
kc.loadFromString(pathOrContent);
|
||||
}
|
||||
|
||||
return kc
|
||||
return kc;
|
||||
}
|
||||
|
||||
/**
|
||||
@ -39,33 +39,33 @@ export function validateConfig(config: KubeConfig | string): KubeConfig {
|
||||
if (typeof config == "string") {
|
||||
config = loadConfig(config);
|
||||
}
|
||||
logger.debug(`validating kube config: ${JSON.stringify(config)}`)
|
||||
logger.debug(`validating kube config: ${JSON.stringify(config)}`);
|
||||
if (!config.users || config.users.length == 0) {
|
||||
throw new Error("No users provided in config")
|
||||
throw new Error("No users provided in config");
|
||||
}
|
||||
if (!config.clusters || config.clusters.length == 0) {
|
||||
throw new Error("No clusters provided in config")
|
||||
throw new Error("No clusters provided in config");
|
||||
}
|
||||
if (!config.contexts || config.contexts.length == 0) {
|
||||
throw new Error("No contexts provided in config")
|
||||
throw new Error("No contexts provided in config");
|
||||
}
|
||||
|
||||
return config
|
||||
return config;
|
||||
}
|
||||
|
||||
/**
|
||||
* Breaks kube config into several configs. Each context as it own KubeConfig object
|
||||
*/
|
||||
export function splitConfig(kubeConfig: KubeConfig): KubeConfig[] {
|
||||
const configs: KubeConfig[] = []
|
||||
const configs: KubeConfig[] = [];
|
||||
if (!kubeConfig.contexts) {
|
||||
return configs;
|
||||
}
|
||||
kubeConfig.contexts.forEach(ctx => {
|
||||
const kc = new KubeConfig();
|
||||
kc.clusters = [kubeConfig.getCluster(ctx.cluster)].filter(n => n);
|
||||
kc.users = [kubeConfig.getUser(ctx.user)].filter(n => n)
|
||||
kc.contexts = [kubeConfig.getContextObject(ctx.name)].filter(n => n)
|
||||
kc.users = [kubeConfig.getUser(ctx.user)].filter(n => n);
|
||||
kc.contexts = [kubeConfig.getContextObject(ctx.name)].filter(n => n);
|
||||
kc.setCurrentContext(ctx.name);
|
||||
|
||||
configs.push(kc);
|
||||
@ -88,7 +88,7 @@ export function dumpConfigYaml(kubeConfig: Partial<KubeConfig>): string {
|
||||
server: cluster.server,
|
||||
'insecure-skip-tls-verify': cluster.skipTLSVerify
|
||||
}
|
||||
}
|
||||
};
|
||||
}),
|
||||
contexts: kubeConfig.contexts.map(context => {
|
||||
return {
|
||||
@ -98,7 +98,7 @@ export function dumpConfigYaml(kubeConfig: Partial<KubeConfig>): string {
|
||||
user: context.user,
|
||||
namespace: context.namespace
|
||||
}
|
||||
}
|
||||
};
|
||||
}),
|
||||
users: kubeConfig.users.map(user => {
|
||||
return {
|
||||
@ -114,9 +114,9 @@ export function dumpConfigYaml(kubeConfig: Partial<KubeConfig>): string {
|
||||
username: user.username,
|
||||
password: user.password
|
||||
}
|
||||
}
|
||||
};
|
||||
})
|
||||
}
|
||||
};
|
||||
|
||||
logger.debug("Dumping KubeConfig:", config);
|
||||
|
||||
@ -127,20 +127,20 @@ export function dumpConfigYaml(kubeConfig: Partial<KubeConfig>): string {
|
||||
export function podHasIssues(pod: V1Pod) {
|
||||
// Logic adapted from dashboard
|
||||
const notReady = !!pod.status.conditions.find(condition => {
|
||||
return condition.type == "Ready" && condition.status !== "True"
|
||||
return condition.type == "Ready" && condition.status !== "True";
|
||||
});
|
||||
|
||||
return (
|
||||
notReady ||
|
||||
pod.status.phase !== "Running" ||
|
||||
pod.spec.priority > 500000 // We're interested in high prio pods events regardless of their running status
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
export function getNodeWarningConditions(node: V1Node) {
|
||||
return node.status.conditions.filter(c =>
|
||||
c.status.toLowerCase() === "true" && c.type !== "Ready" && c.type !== "HostUpgrades"
|
||||
)
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
@ -5,8 +5,8 @@ import { PrometheusStacklight } from "../main/prometheus/stacklight";
|
||||
import { PrometheusProviderRegistry } from "../main/prometheus/provider-registry";
|
||||
|
||||
[PrometheusLens, PrometheusHelm, PrometheusOperator, PrometheusStacklight].forEach(providerClass => {
|
||||
const provider = new providerClass()
|
||||
PrometheusProviderRegistry.registerProvider(provider.id, provider)
|
||||
const provider = new providerClass();
|
||||
PrometheusProviderRegistry.registerProvider(provider.id, provider);
|
||||
});
|
||||
|
||||
export const prometheusProviders = PrometheusProviderRegistry.getProviders()
|
||||
export const prometheusProviders = PrometheusProviderRegistry.getProviders();
|
||||
@ -1,6 +1,6 @@
|
||||
// Register custom protocols
|
||||
|
||||
import { protocol } from "electron"
|
||||
import { protocol } from "electron";
|
||||
import path from "path";
|
||||
|
||||
export function registerFileProtocol(name: string, basePath: string) {
|
||||
@ -8,5 +8,5 @@ export function registerFileProtocol(name: string, basePath: string) {
|
||||
const filePath = request.url.replace(name + "://", "");
|
||||
const absPath = path.resolve(basePath, filePath);
|
||||
callback({ path: absPath });
|
||||
})
|
||||
});
|
||||
}
|
||||
|
||||
@ -1,28 +1,28 @@
|
||||
import request from "request"
|
||||
import requestPromise from "request-promise-native"
|
||||
import { userStore } from "./user-store"
|
||||
import request from "request";
|
||||
import requestPromise from "request-promise-native";
|
||||
import { userStore } from "./user-store";
|
||||
|
||||
// todo: get rid of "request" (deprecated)
|
||||
// https://github.com/lensapp/lens/issues/459
|
||||
|
||||
function getDefaultRequestOpts(): Partial<request.Options> {
|
||||
const { httpsProxy, allowUntrustedCAs } = userStore.preferences
|
||||
const { httpsProxy, allowUntrustedCAs } = userStore.preferences;
|
||||
return {
|
||||
proxy: httpsProxy || undefined,
|
||||
rejectUnauthorized: !allowUntrustedCAs,
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated
|
||||
*/
|
||||
export function customRequest(opts: request.Options) {
|
||||
return request.defaults(getDefaultRequestOpts())(opts)
|
||||
return request.defaults(getDefaultRequestOpts())(opts);
|
||||
}
|
||||
|
||||
/**
|
||||
* @deprecated
|
||||
*/
|
||||
export function customRequestPromise(opts: requestPromise.Options) {
|
||||
return requestPromise.defaults(getDefaultRequestOpts())(opts)
|
||||
return requestPromise.defaults(getDefaultRequestOpts())(opts);
|
||||
}
|
||||
|
||||
@ -1,14 +1,14 @@
|
||||
import { isMac, isWindows } from "./vars";
|
||||
import winca from "win-ca"
|
||||
import macca from "mac-ca"
|
||||
import logger from "../main/logger"
|
||||
import winca from "win-ca";
|
||||
import macca from "mac-ca";
|
||||
import logger from "../main/logger";
|
||||
|
||||
if (isMac) {
|
||||
for (const crt of macca.all()) {
|
||||
const attributes = crt.issuer?.attributes?.map((a: any) => `${a.name}=${a.value}`)
|
||||
logger.debug("Using host CA: " + attributes.join(","))
|
||||
const attributes = crt.issuer?.attributes?.map((a: any) => `${a.name}=${a.value}`);
|
||||
logger.debug("Using host CA: " + attributes.join(","));
|
||||
}
|
||||
}
|
||||
if (isWindows) {
|
||||
winca.inject("+") // see: https://github.com/ukoloff/win-ca#caveats
|
||||
winca.inject("+"); // see: https://github.com/ukoloff/win-ca#caveats
|
||||
}
|
||||
|
||||
@ -1,13 +1,13 @@
|
||||
import type { ThemeId } from "../renderer/theme.store";
|
||||
import { app, remote } from 'electron';
|
||||
import semver from "semver"
|
||||
import { readFile } from "fs-extra"
|
||||
import semver from "semver";
|
||||
import { readFile } from "fs-extra";
|
||||
import { action, observable, reaction, toJS } from "mobx";
|
||||
import { BaseStore } from "./base-store";
|
||||
import migrations from "../migrations/user-store"
|
||||
import migrations from "../migrations/user-store";
|
||||
import { getAppVersion } from "./utils/app-version";
|
||||
import { kubeConfigDefaultPath, loadConfig } from "./kube-helpers";
|
||||
import { appEventBus } from "./event-bus"
|
||||
import { appEventBus } from "./event-bus";
|
||||
import logger from "../main/logger";
|
||||
import path from 'path';
|
||||
|
||||
@ -66,7 +66,7 @@ export class UserStore extends BaseStore<UserStoreModel> {
|
||||
if (app) {
|
||||
// track telemetry availability
|
||||
reaction(() => this.preferences.allowTelemetry, allowed => {
|
||||
appEventBus.emit({name: "telemetry", action: allowed ? "enabled" : "disabled"})
|
||||
appEventBus.emit({name: "telemetry", action: allowed ? "enabled" : "disabled"});
|
||||
});
|
||||
|
||||
// open at system start-up
|
||||
@ -95,7 +95,7 @@ export class UserStore extends BaseStore<UserStoreModel> {
|
||||
|
||||
@action
|
||||
saveLastSeenAppVersion() {
|
||||
appEventBus.emit({name: "app", action: "whats-new-seen"})
|
||||
appEventBus.emit({name: "app", action: "whats-new-seen"});
|
||||
this.lastSeenAppVersion = getAppVersion();
|
||||
}
|
||||
|
||||
@ -127,12 +127,12 @@ export class UserStore extends BaseStore<UserStoreModel> {
|
||||
* @returns string
|
||||
*/
|
||||
getDefaultKubectlPath(): string {
|
||||
return path.join((app || remote.app).getPath("userData"), "binaries")
|
||||
return path.join((app || remote.app).getPath("userData"), "binaries");
|
||||
}
|
||||
|
||||
@action
|
||||
protected async fromStore(data: Partial<UserStoreModel> = {}) {
|
||||
const { lastSeenAppVersion, seenContexts = [], preferences, kubeConfigPath } = data
|
||||
const { lastSeenAppVersion, seenContexts = [], preferences, kubeConfigPath } = data;
|
||||
if (lastSeenAppVersion) {
|
||||
this.lastSeenAppVersion = lastSeenAppVersion;
|
||||
}
|
||||
@ -149,10 +149,10 @@ export class UserStore extends BaseStore<UserStoreModel> {
|
||||
lastSeenAppVersion: this.lastSeenAppVersion,
|
||||
seenContexts: Array.from(this.seenContexts),
|
||||
preferences: this.preferences,
|
||||
}
|
||||
};
|
||||
return toJS(model, {
|
||||
recurseEverything: true,
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -1,4 +1,4 @@
|
||||
import packageInfo from "../../../package.json"
|
||||
import packageInfo from "../../../package.json";
|
||||
|
||||
export function getAppVersion(): string {
|
||||
return packageInfo.version;
|
||||
@ -9,5 +9,5 @@ export function getBundledKubectlVersion(): string {
|
||||
}
|
||||
|
||||
export function getBundledExtensions(): string[] {
|
||||
return packageInfo.lens?.extensions || []
|
||||
return packageInfo.lens?.extensions || [];
|
||||
}
|
||||
|
||||
@ -6,7 +6,7 @@ export function autobind() {
|
||||
return function (target: Constructor | object, prop?: string, descriptor?: PropertyDescriptor) {
|
||||
if (target instanceof Function) return bindClass(target);
|
||||
else return bindMethod(target, prop, descriptor);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function bindClass<T extends Constructor>(constructor: T) {
@ -22,12 +22,12 @@ function bindClass<T extends Constructor>(constructor: T) {
if (skipMethod(prop)) return;
const boundDescriptor = bindMethod(proto, prop, descriptors[prop]);
Object.defineProperty(proto, prop, boundDescriptor);
})
});
}

function bindMethod(target: object, prop?: string, descriptor?: PropertyDescriptor) {
if (!descriptor || typeof descriptor.value !== "function") {
throw new Error(`@autobind() must be used on class or method only`)
throw new Error(`@autobind() must be used on class or method only`);
}
const { value: func, enumerable, configurable } = descriptor;
const boundFunc = new WeakMap<object, Function>();

@ -1,4 +1,4 @@
import { compile } from "path-to-regexp"
import { compile } from "path-to-regexp";

export interface IURLParams<P extends object = {}, Q extends object = {}> {
params?: P;
@ -8,7 +8,7 @@ export interface IURLParams<P extends object = {}, Q extends object = {}> {
export function buildURL<P extends object = {}, Q extends object = {}>(path: string | any) {
const pathBuilder = compile(String(path));
return function ({ params, query }: IURLParams<P, Q> = {}) {
const queryParams = query ? new URLSearchParams(Object.entries(query)).toString() : ""
return pathBuilder(params) + (queryParams ? `?${queryParams}` : "")
}
const queryParams = query ? new URLSearchParams(Object.entries(query)).toString() : "";
return pathBuilder(params) + (queryParams ? `?${queryParams}` : "");
};
}

@ -5,7 +5,7 @@
export function defineGlobal(propName: string, descriptor: PropertyDescriptor) {
const scope = typeof global !== "undefined" ? global : window;
if (scope.hasOwnProperty(propName)) {
console.info(`Global variable "${propName}" already exists. Skipping.`)
console.info(`Global variable "${propName}" already exists. Skipping.`);
return;
}
Object.defineProperty(scope, propName, descriptor);

@ -1,14 +1,14 @@
// Common utils (main OR renderer)

export * from "./app-version"
export * from "./autobind"
export * from "./base64"
export * from "./camelCase"
export * from "./cloneJson"
export * from "./debouncePromise"
export * from "./defineGlobal"
export * from "./getRandId"
export * from "./splitArray"
export * from "./saveToAppFiles"
export * from "./singleton"
export * from "./openExternal"
export * from "./app-version";
export * from "./autobind";
export * from "./base64";
export * from "./camelCase";
export * from "./cloneJson";
export * from "./debouncePromise";
export * from "./defineGlobal";
export * from "./getRandId";
export * from "./splitArray";
export * from "./saveToAppFiles";
export * from "./singleton";
export * from "./openExternal";

@ -1,5 +1,5 @@
// Opens a link in external browser
import { shell } from "electron"
import { shell } from "electron";

export function openExternal(url: string) {
return shell.openExternal(url);

@ -2,7 +2,7 @@
import path from "path";
import { app, remote } from "electron";
import { ensureDirSync, writeFileSync } from "fs-extra";
import { WriteFileOptions } from "fs"
import { WriteFileOptions } from "fs";

export function saveToAppFiles(filePath: string, contents: any, options?: WriteFileOptions): string {
const absPath = path.resolve((app || remote.app).getPath("userData"), filePath);

@ -24,5 +24,5 @@ class Singleton {
}
}

export { Singleton }
export { Singleton };
export default Singleton;
@ -15,5 +15,5 @@ export function splitArray<T>(array: T[], element: T): [T[], T[], boolean] {
if (index < 0) {
return [array, [], false];
}
return [array.slice(0, index), array.slice(index + 1, array.length), true]
return [array.slice(0, index), array.slice(index + 1, array.length), true];
}
@ -1,19 +1,19 @@
// App's common configuration for any process (main, renderer, build pipeline, etc.)
import path from "path";
import packageInfo from "../../package.json"
import packageInfo from "../../package.json";
import { defineGlobal } from "./utils/defineGlobal";

export const isMac = process.platform === "darwin"
export const isWindows = process.platform === "win32"
export const isLinux = process.platform === "linux"
export const isMac = process.platform === "darwin";
export const isWindows = process.platform === "win32";
export const isLinux = process.platform === "linux";
export const isDebugging = process.env.DEBUG === "true";
export const isSnap = !!process.env["SNAP"]
export const isProduction = process.env.NODE_ENV === "production"
export const isSnap = !!process.env["SNAP"];
export const isProduction = process.env.NODE_ENV === "production";
export const isTestEnv = !!process.env.JEST_WORKER_ID;
export const isDevelopment = !isTestEnv && !isProduction;

export const appName = `${packageInfo.productName}${isDevelopment ? "Dev" : ""}`
export const publicPath = "/build/"
export const appName = `${packageInfo.productName}${isDevelopment ? "Dev" : ""}`;
export const publicPath = "/build/";

// Webpack build paths
export const contextDir = process.cwd();
@ -22,7 +22,7 @@ export const mainDir = path.join(contextDir, "src/main");
export const rendererDir = path.join(contextDir, "src/renderer");
export const htmlTemplate = path.resolve(rendererDir, "template.html");
export const sassCommonVars = path.resolve(rendererDir, "components/vars.scss");
export const webpackDevServerPort = 9009
export const webpackDevServerPort = 9009;

// Special runtime paths
defineGlobal("__static", {
@ -30,14 +30,14 @@ defineGlobal("__static", {
if (isDevelopment) {
return path.resolve(contextDir, "static");
}
return path.resolve(process.resourcesPath, "static")
return path.resolve(process.resourcesPath, "static");
}
})
});

// Apis
export const apiPrefix = "/api" // local router apis
export const apiKubePrefix = "/api-kube" // k8s cluster apis
export const apiPrefix = "/api"; // local router apis
export const apiKubePrefix = "/api-kube"; // k8s cluster apis

// Links
export const issuesTrackerUrl = "https://github.com/lensapp/lens/issues"
export const slackUrl = "https://join.slack.com/t/k8slens/shared_invite/enQtOTc5NjAyNjYyOTk4LWU1NDQ0ZGFkOWJkNTRhYTc2YjVmZDdkM2FkNGM5MjhiYTRhMDU2NDQ1MzIyMDA4ZGZlNmExOTc0N2JmY2M3ZGI"
export const issuesTrackerUrl = "https://github.com/lensapp/lens/issues";
export const slackUrl = "https://join.slack.com/t/k8slens/shared_invite/enQtOTc5NjAyNjYyOTk4LWU1NDQ0ZGFkOWJkNTRhYTc2YjVmZDdkM2FkNGM5MjhiYTRhMDU2NDQ1MzIyMDA4ZGZlNmExOTc0N2JmY2M3ZGI";

@ -1,7 +1,7 @@
import { ipcRenderer } from "electron";
import { action, computed, observable, toJS, reaction } from "mobx";
import { BaseStore } from "./base-store";
import { clusterStore } from "./cluster-store"
import { clusterStore } from "./cluster-store";
import { appEventBus } from "./event-bus";
import { broadcastMessage } from "../common/ipc";
import logger from "../main/logger";
@ -32,33 +32,33 @@ export class Workspace implements WorkspaceModel, WorkspaceState {
@observable enabled: boolean

constructor(data: WorkspaceModel) {
Object.assign(this, data)
Object.assign(this, data);

if (!ipcRenderer) {
reaction(() => this.getState(), () => {
this.pushState()
})
this.pushState();
});
}
}

get isManaged(): boolean {
return !!this.ownerRef
return !!this.ownerRef;
}

getState(): WorkspaceState {
return {
enabled: this.enabled
}
};
}

pushState(state = this.getState()) {
logger.silly("[WORKSPACE] pushing state", {...state, id: this.id})
broadcastMessage("workspace:state", this.id, toJS(state))
logger.silly("[WORKSPACE] pushing state", {...state, id: this.id});
broadcastMessage("workspace:state", this.id, toJS(state));
}

@action
setState(state: WorkspaceState) {
Object.assign(this, state)
Object.assign(this, state);
}

toJSON(): WorkspaceModel {
@ -67,7 +67,7 @@ export class Workspace implements WorkspaceModel, WorkspaceState {
name: this.name,
description: this.description,
ownerRef: this.ownerRef
})
});
}
}
@ -81,21 +81,21 @@ export class WorkspaceStore extends BaseStore<WorkspaceStoreModel> {

if (!ipcRenderer) {
setInterval(() => {
this.pushState()
}, 5000)
this.pushState();
}, 5000);
}
}

registerIpcListener() {
logger.info("[WORKSPACE-STORE] starting to listen state events")
logger.info("[WORKSPACE-STORE] starting to listen state events");
ipcRenderer.on("workspace:state", (event, workspaceId: string, state: WorkspaceState) => {
this.getById(workspaceId)?.setState(state)
})
this.getById(workspaceId)?.setState(state);
});
}

unregisterIpcListener() {
super.unregisterIpcListener()
ipcRenderer.removeAllListeners("workspace:state")
super.unregisterIpcListener();
ipcRenderer.removeAllListeners("workspace:state");
}

@observable currentWorkspaceId = WorkspaceStore.defaultId;
@ -121,8 +121,8 @@ export class WorkspaceStore extends BaseStore<WorkspaceStoreModel> {

pushState() {
this.workspaces.forEach((w) => {
w.pushState()
})
w.pushState();
});
}

isDefault(id: WorkspaceId) {
@ -154,7 +154,7 @@ export class WorkspaceStore extends BaseStore<WorkspaceStoreModel> {
return;
}
this.workspaces.set(id, workspace);
appEventBus.emit({name: "workspace", action: "add"})
appEventBus.emit({name: "workspace", action: "add"});
return workspace;
}

@ -166,7 +166,7 @@ export class WorkspaceStore extends BaseStore<WorkspaceStoreModel> {

@action
removeWorkspace(workspace: Workspace) {
this.removeWorkspaceById(workspace.id)
this.removeWorkspaceById(workspace.id);
}

@action
@ -180,24 +180,24 @@ export class WorkspaceStore extends BaseStore<WorkspaceStoreModel> {
this.currentWorkspaceId = WorkspaceStore.defaultId; // reset to default
}
this.workspaces.delete(id);
appEventBus.emit({name: "workspace", action: "remove"})
clusterStore.removeByWorkspaceId(id)
appEventBus.emit({name: "workspace", action: "remove"});
clusterStore.removeByWorkspaceId(id);
}

@action
protected fromStore({ currentWorkspace, workspaces = [] }: WorkspaceStoreModel) {
if (currentWorkspace) {
this.currentWorkspaceId = currentWorkspace
this.currentWorkspaceId = currentWorkspace;
}
if (workspaces.length) {
this.workspaces.clear();
workspaces.forEach(ws => {
const workspace = new Workspace(ws)
const workspace = new Workspace(ws);
if (!workspace.isManaged) {
workspace.enabled = true
workspace.enabled = true;
}
this.workspaces.set(workspace.id, workspace)
})
this.workspaces.set(workspace.id, workspace);
});
}
}

@ -207,8 +207,8 @@ export class WorkspaceStore extends BaseStore<WorkspaceStoreModel> {
workspaces: this.workspacesList.map((w) => w.toJSON()),
}, {
recurseEverything: true
})
});
}
}

export const workspaceStore = WorkspaceStore.getInstance<WorkspaceStore>()
export const workspaceStore = WorkspaceStore.getInstance<WorkspaceStore>();

@ -1,6 +1,6 @@
import { LensExtension } from "../lens-extension"
import { LensExtension } from "../lens-extension";

let ext: LensExtension = null
let ext: LensExtension = null;

describe("lens extension", () => {
beforeEach(async () => {
@ -12,12 +12,12 @@ describe("lens extension", () => {
manifestPath: "/this/is/fake/package.json",
isBundled: false,
isEnabled: true
})
})
});
});

describe("name", () => {
it("returns name", () => {
expect(ext.name).toBe("foo-bar")
})
})
})
expect(ext.name).toBe("foo-bar");
});
});
});
@ -1,11 +1,11 @@
import fs from "fs";
import path from "path"
import hb from "handlebars"
import { observable } from "mobx"
import { ResourceApplier } from "../main/resource-applier"
import path from "path";
import hb from "handlebars";
import { observable } from "mobx";
import { ResourceApplier } from "../main/resource-applier";
import { Cluster } from "../main/cluster";
import logger from "../main/logger";
import { app } from "electron"
import { app } from "electron";
import { requestMain } from "../common/ipc";
import { clusterKubectlApplyAllHandler } from "../common/cluster-ipc";

@ -38,9 +38,9 @@ export abstract class ClusterFeature {

protected async applyResources(cluster: Cluster, resources: string[]) {
if (app) {
await new ResourceApplier(cluster).kubectlApplyAll(resources)
await new ResourceApplier(cluster).kubectlApplyAll(resources);
} else {
await requestMain(clusterKubectlApplyAllHandler, cluster.id, resources)
await requestMain(clusterKubectlApplyAllHandler, cluster.id, resources);
}
}

@ -1,4 +1,4 @@
import { getAppVersion } from "../../common/utils";

export const version = getAppVersion()
export { isSnap, isWindows, isMac, isLinux, appName, slackUrl, issuesTrackerUrl } from "../../common/vars"
export const version = getAppVersion();
export { isSnap, isWindows, isMac, isLinux, appName, slackUrl, issuesTrackerUrl } from "../../common/vars";
@ -1,2 +1,2 @@
export { ClusterFeature as Feature } from "../cluster-feature"
export type { ClusterFeatureStatus as FeatureStatus } from "../cluster-feature"
export { ClusterFeature as Feature } from "../cluster-feature";
export type { ClusterFeatureStatus as FeatureStatus } from "../cluster-feature";

@ -1,2 +1,2 @@
export { appEventBus } from "../../common/event-bus"
export type { AppEvent } from "../../common/event-bus"
export { appEventBus } from "../../common/event-bus";
export type { AppEvent } from "../../common/event-bus";

@ -1,14 +1,14 @@
// Lens-extensions api developer's kit
export * from "../lens-main-extension"
export * from "../lens-renderer-extension"
export * from "../lens-main-extension";
export * from "../lens-renderer-extension";

// APIs
import * as App from "./app"
import * as EventBus from "./event-bus"
import * as Store from "./stores"
import * as Util from "./utils"
import * as ClusterFeature from "./cluster-feature"
import * as Interface from "../interfaces"
import * as App from "./app";
import * as EventBus from "./event-bus";
import * as Store from "./stores";
import * as Util from "./utils";
import * as ClusterFeature from "./cluster-feature";
import * as Interface from "../interfaces";

export {
App,
@ -17,4 +17,4 @@ export {
Interface,
Store,
Util,
}
};

@ -1,6 +1,6 @@
export { ExtensionStore } from "../extension-store"
export { clusterStore } from "../../common/cluster-store"
export type { ClusterModel } from "../../common/cluster-store"
export { Cluster } from "../../main/cluster"
export { workspaceStore, Workspace } from "../../common/workspace-store"
export type { WorkspaceModel } from "../../common/workspace-store"
export { ExtensionStore } from "../extension-store";
export { clusterStore } from "../../common/cluster-store";
export type { ClusterModel } from "../../common/cluster-store";
export { Cluster } from "../../main/cluster";
export { workspaceStore, Workspace } from "../../common/workspace-store";
export type { WorkspaceModel } from "../../common/workspace-store";

@ -1,3 +1,3 @@
export { Singleton, openExternal } from "../../common/utils"
export { prevDefault, stopPropagation } from "../../renderer/utils/prevDefault"
export { cssNames } from "../../renderer/utils/cssNames"
export { Singleton, openExternal } from "../../common/utils";
export { prevDefault, stopPropagation } from "../../renderer/utils/prevDefault";
export { cssNames } from "../../renderer/utils/cssNames";

@ -1,4 +1,4 @@
// Extension-api types generation bundle

export * from "./core-api"
export * from "./renderer-api"
export * from "./core-api";
export * from "./renderer-api";
@ -1,18 +1,18 @@
import type { LensExtension, LensExtensionConstructor, LensExtensionId } from "./lens-extension"
import type { LensMainExtension } from "./lens-main-extension"
import type { LensRendererExtension } from "./lens-renderer-extension"
import type { LensExtension, LensExtensionConstructor, LensExtensionId } from "./lens-extension";
import type { LensMainExtension } from "./lens-main-extension";
import type { LensRendererExtension } from "./lens-renderer-extension";
import type { InstalledExtension } from "./extension-manager";
import path from "path"
import { broadcastMessage, handleRequest, requestMain, subscribeToBroadcast } from "../common/ipc"
import { action, computed, observable, reaction, toJS, when } from "mobx"
import logger from "../main/logger"
import { app, ipcRenderer, remote } from "electron"
import path from "path";
import { broadcastMessage, handleRequest, requestMain, subscribeToBroadcast } from "../common/ipc";
import { action, computed, observable, reaction, toJS, when } from "mobx";
import logger from "../main/logger";
import { app, ipcRenderer, remote } from "electron";
import * as registries from "./registries";
import { extensionsStore } from "./extensions-store";

// lazy load so that we get correct userData
export function extensionPackagesRoot() {
return path.join((app || remote.app).getPath("userData"))
return path.join((app || remote.app).getPath("userData"));
}

export class ExtensionLoader {
@ -29,22 +29,22 @@ export class ExtensionLoader {
if (ext.isBundled) {
extensions.delete(extId);
}
})
});
return extensions;
}

@action
async init() {
if (ipcRenderer) {
this.initRenderer()
this.initRenderer();
} else {
this.initMain()
this.initMain();
}
extensionsStore.manageState(this);
}

initExtensions(extensions?: Map<LensExtensionId, InstalledExtension>) {
this.extensions.replace(extensions)
this.extensions.replace(extensions);
}

protected async initMain() {
@ -53,12 +53,12 @@ export class ExtensionLoader {
this.broadcastExtensions();

reaction(() => this.extensions.toJS(), () => {
this.broadcastExtensions()
})
this.broadcastExtensions();
});

handleRequest(this.requestExtensionsChannel, () => {
return Array.from(this.toJSON())
})
return Array.from(this.toJSON());
});
}

protected async initRenderer() {
@ -66,25 +66,25 @@ export class ExtensionLoader {
this.isLoaded = true;
extensions.forEach(([extId, ext]) => {
if (!this.extensions.has(extId)) {
this.extensions.set(extId, ext)
this.extensions.set(extId, ext);
}
})
}
requestMain(this.requestExtensionsChannel).then(extensionListHandler)
});
};
requestMain(this.requestExtensionsChannel).then(extensionListHandler);
subscribeToBroadcast(this.requestExtensionsChannel, (event, extensions: [LensExtensionId, InstalledExtension][]) => {
extensionListHandler(extensions)
extensionListHandler(extensions);
});
}

loadOnMain() {
logger.info('[EXTENSIONS-LOADER]: load on main')
logger.info('[EXTENSIONS-LOADER]: load on main');
this.autoInitExtensions((ext: LensMainExtension) => [
registries.menuRegistry.add(ext.appMenus)
]);
}

loadOnClusterManagerRenderer() {
logger.info('[EXTENSIONS-LOADER]: load on main renderer (cluster manager)')
logger.info('[EXTENSIONS-LOADER]: load on main renderer (cluster manager)');
this.autoInitExtensions((ext: LensRendererExtension) => [
registries.globalPageRegistry.add(ext.globalPages, ext),
registries.globalPageMenuRegistry.add(ext.globalPageMenus, ext),
@ -95,14 +95,14 @@ export class ExtensionLoader {
}

loadOnClusterRenderer() {
logger.info('[EXTENSIONS-LOADER]: load on cluster renderer (dashboard)')
logger.info('[EXTENSIONS-LOADER]: load on cluster renderer (dashboard)');
this.autoInitExtensions((ext: LensRendererExtension) => [
registries.clusterPageRegistry.add(ext.clusterPages, ext),
registries.clusterPageMenuRegistry.add(ext.clusterPageMenus, ext),
registries.kubeObjectMenuRegistry.add(ext.kubeObjectMenuItems),
registries.kubeObjectDetailRegistry.add(ext.kubeObjectDetailItems),
registries.kubeObjectStatusRegistry.add(ext.kubeObjectStatusTexts)
])
]);
}

protected autoInitExtensions(register: (ext: LensExtension) => Function[]) {
@ -111,43 +111,43 @@ export class ExtensionLoader {
let instance = this.instances.get(extId);
if (ext.isEnabled && !instance) {
try {
const LensExtensionClass: LensExtensionConstructor = this.requireExtension(ext)
const LensExtensionClass: LensExtensionConstructor = this.requireExtension(ext);
if (!LensExtensionClass) continue;
instance = new LensExtensionClass(ext);
instance.whenEnabled(() => register(instance));
instance.enable();
this.instances.set(extId, instance);
} catch (err) {
logger.error(`[EXTENSION-LOADER]: activation extension error`, { ext, err })
logger.error(`[EXTENSION-LOADER]: activation extension error`, { ext, err });
}
} else if (!ext.isEnabled && instance) {
try {
instance.disable();
this.instances.delete(extId);
} catch (err) {
logger.error(`[EXTENSION-LOADER]: deactivation extension error`, { ext, err })
logger.error(`[EXTENSION-LOADER]: deactivation extension error`, { ext, err });
}
}
}
}, {
fireImmediately: true,
})
});
}

protected requireExtension(extension: InstalledExtension) {
let extEntrypoint = ""
let extEntrypoint = "";
try {
if (ipcRenderer && extension.manifest.renderer) {
extEntrypoint = path.resolve(path.join(path.dirname(extension.manifestPath), extension.manifest.renderer))
extEntrypoint = path.resolve(path.join(path.dirname(extension.manifestPath), extension.manifest.renderer));
} else if (!ipcRenderer && extension.manifest.main) {
extEntrypoint = path.resolve(path.join(path.dirname(extension.manifestPath), extension.manifest.main))
extEntrypoint = path.resolve(path.join(path.dirname(extension.manifestPath), extension.manifest.main));
}
if (extEntrypoint !== "") {
return __non_webpack_require__(extEntrypoint).default;
}
} catch (err) {
console.error(`[EXTENSION-LOADER]: can't load extension main at ${extEntrypoint}: ${err}`, { extension });
console.trace(err)
console.trace(err);
}
}

@ -159,11 +159,11 @@ export class ExtensionLoader {
return toJS(this.extensions, {
exportMapsAsObjects: false,
recurseEverything: true,
})
});
}

broadcastExtensions() {
broadcastMessage(this.requestExtensionsChannel, Array.from(this.toJSON()))
broadcastMessage(this.requestExtensionsChannel, Array.from(this.toJSON()));
}
}
@ -1,11 +1,11 @@
import type { LensExtensionId, LensExtensionManifest } from "./lens-extension"
import path from "path"
import os from "os"
import fs from "fs-extra"
import type { LensExtensionId, LensExtensionManifest } from "./lens-extension";
import path from "path";
import os from "os";
import fs from "fs-extra";
import child_process from "child_process";
import logger from "../main/logger"
import { extensionPackagesRoot } from "./extension-loader"
import { getBundledExtensions } from "../common/utils/app-version"
import logger from "../main/logger";
import { extensionPackagesRoot } from "./extension-loader";
import { getBundledExtensions } from "../common/utils/app-version";

export interface InstalledExtension {
readonly manifest: LensExtensionManifest;
@ -31,11 +31,11 @@ export class ExtensionManager {
}

get extensionPackagesRoot() {
return extensionPackagesRoot()
return extensionPackagesRoot();
}

get inTreeTargetPath() {
return path.join(this.extensionPackagesRoot, "extensions")
return path.join(this.extensionPackagesRoot, "extensions");
}

get inTreeFolderPath(): string {
@ -43,7 +43,7 @@ export class ExtensionManager {
}

get nodeModulesPath(): string {
return path.join(this.extensionPackagesRoot, "node_modules")
return path.join(this.extensionPackagesRoot, "node_modules");
}

get localFolderPath(): string {
@ -51,30 +51,30 @@ export class ExtensionManager {
}

get npmPath() {
return __non_webpack_require__.resolve('npm/bin/npm-cli')
return __non_webpack_require__.resolve('npm/bin/npm-cli');
}

get packageJsonPath() {
return path.join(this.extensionPackagesRoot, "package.json")
return path.join(this.extensionPackagesRoot, "package.json");
}

async load(): Promise<Map<LensExtensionId, InstalledExtension>> {
logger.info("[EXTENSION-MANAGER] loading extensions from " + this.extensionPackagesRoot)
logger.info("[EXTENSION-MANAGER] loading extensions from " + this.extensionPackagesRoot);
if (fs.existsSync(path.join(this.extensionPackagesRoot, "package-lock.json"))) {
await fs.remove(path.join(this.extensionPackagesRoot, "package-lock.json"))
await fs.remove(path.join(this.extensionPackagesRoot, "package-lock.json"));
}
try {
await fs.access(this.inTreeFolderPath, fs.constants.W_OK)
this.bundledFolderPath = this.inTreeFolderPath
await fs.access(this.inTreeFolderPath, fs.constants.W_OK);
this.bundledFolderPath = this.inTreeFolderPath;
} catch {
// we need to copy in-tree extensions so that we can symlink them properly on "npm install"
await fs.remove(this.inTreeTargetPath)
await fs.ensureDir(this.inTreeTargetPath)
await fs.copy(this.inTreeFolderPath, this.inTreeTargetPath)
this.bundledFolderPath = this.inTreeTargetPath
await fs.remove(this.inTreeTargetPath);
await fs.ensureDir(this.inTreeTargetPath);
await fs.copy(this.inTreeFolderPath, this.inTreeTargetPath);
this.bundledFolderPath = this.inTreeTargetPath;
}
await fs.ensureDir(this.nodeModulesPath)
await fs.ensureDir(this.localFolderPath)
await fs.ensureDir(this.nodeModulesPath);
await fs.ensureDir(this.localFolderPath);
return await this.loadExtensions();
}

@ -82,16 +82,16 @@ export class ExtensionManager {
let manifestJson: LensExtensionManifest;
try {
fs.accessSync(manifestPath, fs.constants.F_OK); // check manifest file for existence
manifestJson = __non_webpack_require__(manifestPath)
this.packagesJson.dependencies[manifestJson.name] = path.dirname(manifestPath)
manifestJson = __non_webpack_require__(manifestPath);
this.packagesJson.dependencies[manifestJson.name] = path.dirname(manifestPath);

logger.info("[EXTENSION-MANAGER] installed extension " + manifestJson.name)
logger.info("[EXTENSION-MANAGER] installed extension " + manifestJson.name);
return {
manifestPath: path.join(this.nodeModulesPath, manifestJson.name, "package.json"),
manifest: manifestJson,
isBundled: isBundled,
isEnabled: isBundled,
}
};
} catch (err) {
logger.error(`[EXTENSION-MANAGER]: can't install extension at ${manifestPath}: ${err}`, { manifestJson });
}
@ -102,65 +102,65 @@ export class ExtensionManager {
const child = child_process.fork(this.npmPath, ["install", "--silent", "--no-audit", "--only=prod", "--prefer-offline", "--no-package-lock"], {
cwd: extensionPackagesRoot(),
silent: true
})
});
child.on("close", () => {
resolve()
})
resolve();
});
child.on("error", (err) => {
reject(err)
})
})
reject(err);
});
});
}

async loadExtensions() {
const bundledExtensions = await this.loadBundledExtensions()
const localExtensions = await this.loadFromFolder(this.localFolderPath)
await fs.writeFile(path.join(this.packageJsonPath), JSON.stringify(this.packagesJson, null, 2), { mode: 0o600 })
await this.installPackages()
const extensions = bundledExtensions.concat(localExtensions)
const bundledExtensions = await this.loadBundledExtensions();
const localExtensions = await this.loadFromFolder(this.localFolderPath);
await fs.writeFile(path.join(this.packageJsonPath), JSON.stringify(this.packagesJson, null, 2), { mode: 0o600 });
await this.installPackages();
const extensions = bundledExtensions.concat(localExtensions);
return new Map(extensions.map(ext => [ext.manifestPath, ext]));
}

async loadBundledExtensions() {
const extensions: InstalledExtension[] = []
const folderPath = this.bundledFolderPath
const bundledExtensions = getBundledExtensions()
const extensions: InstalledExtension[] = [];
const folderPath = this.bundledFolderPath;
const bundledExtensions = getBundledExtensions();
const paths = await fs.readdir(folderPath);
for (const fileName of paths) {
if (!bundledExtensions.includes(fileName)) {
continue
continue;
}
const absPath = path.resolve(folderPath, fileName);
const manifestPath = path.resolve(absPath, "package.json");
const ext = await this.getByManifest(manifestPath, { isBundled: true }).catch(() => null)
const ext = await this.getByManifest(manifestPath, { isBundled: true }).catch(() => null);
if (ext) {
extensions.push(ext)
extensions.push(ext);
}
}
logger.debug(`[EXTENSION-MANAGER]: ${extensions.length} extensions loaded`, { folderPath, extensions });
return extensions
return extensions;
}

async loadFromFolder(folderPath: string): Promise<InstalledExtension[]> {
const bundledExtensions = getBundledExtensions()
const extensions: InstalledExtension[] = []
const bundledExtensions = getBundledExtensions();
const extensions: InstalledExtension[] = [];
const paths = await fs.readdir(folderPath);
for (const fileName of paths) {
if (bundledExtensions.includes(fileName)) { // do no allow to override bundled extensions
continue
continue;
}
const absPath = path.resolve(folderPath, fileName);
if (!fs.existsSync(absPath)) {
continue
continue;
}
const lstat = await fs.lstat(absPath)
const lstat = await fs.lstat(absPath);
if (!lstat.isDirectory() && !lstat.isSymbolicLink()) { // skip non-directories
continue
continue;
}
const manifestPath = path.resolve(absPath, "package.json");
const ext = await this.getByManifest(manifestPath).catch(() => null)
const ext = await this.getByManifest(manifestPath).catch(() => null);
if (ext) {
extensions.push(ext)
extensions.push(ext);
}
}

@ -169,4 +169,4 @@ export class ExtensionManager {
}
}

export const extensionManager = new ExtensionManager()
export const extensionManager = new ExtensionManager();
@ -1,21 +1,21 @@
import { BaseStore } from "../common/base-store"
import * as path from "path"
import { LensExtension } from "./lens-extension"
import { BaseStore } from "../common/base-store";
import * as path from "path";
import { LensExtension } from "./lens-extension";

export abstract class ExtensionStore<T> extends BaseStore<T> {
protected extension: LensExtension

async loadExtension(extension: LensExtension) {
this.extension = extension
return super.load()
this.extension = extension;
return super.load();
}

async load() {
if (!this.extension) { return }
return super.load()
if (!this.extension) { return; }
return super.load();
}

protected cwd() {
return path.join(super.cwd(), "extension-store", this.extension.name)
return path.join(super.cwd(), "extension-store", this.extension.name);
}
}

@ -1,6 +1,6 @@
import type { LensExtensionId } from "./lens-extension";
import type { ExtensionLoader } from "./extension-loader";
import { BaseStore } from "../common/base-store"
import { BaseStore } from "../common/base-store";
import { action, observable, reaction, toJS } from "mobx";

export interface LensExtensionsStoreModel {
@ -25,9 +25,9 @@ export class ExtensionsStore extends BaseStore<LensExtensionsStoreModel> {
return Array.from(extensionLoader.userExtensions).reduce((state, [extId, ext]) => {
state[extId] = {
enabled: ext.isEnabled,
}
};
return state;
}, state)
}, state);
}

async manageState(extensionLoader: ExtensionLoader) {
@ -46,13 +46,13 @@ export class ExtensionsStore extends BaseStore<LensExtensionsStoreModel> {
if (ext && !ext.isBundled) {
ext.isEnabled = state.enabled;
}
})
})
});
});

// save state on change `extension.isEnabled`
reaction(() => this.getState(extensionLoader), extensionsState => {
this.state.merge(extensionsState)
})
this.state.merge(extensionsState);
});
}

isEnabled(extId: LensExtensionId) {
@ -70,7 +70,7 @@ export class ExtensionsStore extends BaseStore<LensExtensionsStoreModel> {
extensions: this.state.toJSON(),
}, {
recurseEverything: true
})
});
}
}

@ -1 +1 @@
export * from "./registrations"
export * from "./registrations";
@ -1,8 +1,8 @@
export type { AppPreferenceRegistration, AppPreferenceComponents } from "../registries/app-preference-registry"
export type { ClusterFeatureRegistration, ClusterFeatureComponents } from "../registries/cluster-feature-registry"
export type { KubeObjectDetailRegistration, KubeObjectDetailComponents } from "../registries/kube-object-detail-registry"
export type { KubeObjectMenuRegistration, KubeObjectMenuComponents } from "../registries/kube-object-menu-registry"
export type { KubeObjectStatusRegistration } from "../registries/kube-object-status-registry"
export type { PageRegistration, PageComponents } from "../registries/page-registry"
export type { PageMenuRegistration, PageMenuComponents } from "../registries/page-menu-registry"
export type { StatusBarRegistration } from "../registries/status-bar-registry"
export type { AppPreferenceRegistration, AppPreferenceComponents } from "../registries/app-preference-registry";
export type { ClusterFeatureRegistration, ClusterFeatureComponents } from "../registries/cluster-feature-registry";
export type { KubeObjectDetailRegistration, KubeObjectDetailComponents } from "../registries/kube-object-detail-registry";
export type { KubeObjectMenuRegistration, KubeObjectMenuComponents } from "../registries/kube-object-menu-registry";
export type { KubeObjectStatusRegistration } from "../registries/kube-object-status-registry";
export type { PageRegistration, PageComponents } from "../registries/page-registry";
export type { PageMenuRegistration, PageMenuComponents } from "../registries/page-menu-registry";
export type { StatusBarRegistration } from "../registries/status-bar-registry";
@ -21,9 +21,9 @@ export class LensExtension {
@observable private isEnabled = false;

constructor({ manifest, manifestPath, isBundled }: InstalledExtension) {
this.manifest = manifest
this.manifestPath = manifestPath
this.isBundled = !!isBundled
this.manifest = manifest;
this.manifestPath = manifestPath;
this.isBundled = !!isBundled;
}

get id(): LensExtensionId {
@ -31,15 +31,15 @@ export class LensExtension {
}

get name() {
return this.manifest.name
return this.manifest.name;
}

get version() {
return this.manifest.version
return this.manifest.version;
}

get description() {
return this.manifest.description
return this.manifest.description;
}

@action
@ -60,18 +60,18 @@ export class LensExtension {

toggle(enable?: boolean) {
if (typeof enable === "boolean") {
enable ? this.enable() : this.disable()
enable ? this.enable() : this.disable();
} else {
this.isEnabled ? this.disable() : this.enable()
this.isEnabled ? this.disable() : this.enable();
}
}

async whenEnabled(handlers: () => Function[]) {
const disposers: Function[] = [];
const unregisterHandlers = () => {
disposers.forEach(unregister => unregister())
disposers.forEach(unregister => unregister());
disposers.length = 0;
}
};
const cancelReaction = reaction(() => this.isEnabled, isEnabled => {
if (isEnabled) {
disposers.push(...handlers());
@ -80,11 +80,11 @@ export class LensExtension {
}
}, {
fireImmediately: true
})
});
return () => {
unregisterHandlers();
cancelReaction();
}
};
}

protected onActivate() {

@ -1,8 +1,8 @@
import type { MenuRegistration } from "./registries/menu-registry";
import { observable } from "mobx";
import { LensExtension } from "./lens-extension"
import { LensExtension } from "./lens-extension";
import { WindowManager } from "../main/window-manager";
import { getExtensionPageUrl } from "./registries/page-registry"
import { getExtensionPageUrl } from "./registries/page-registry";

export class LensMainExtension extends LensExtension {
@observable.shallow appMenus: MenuRegistration[] = []

@ -1,7 +1,7 @@
import type { AppPreferenceRegistration, ClusterFeatureRegistration, KubeObjectDetailRegistration, KubeObjectMenuRegistration, KubeObjectStatusRegistration, PageMenuRegistration, PageRegistration, StatusBarRegistration, } from "./registries"
import type { AppPreferenceRegistration, ClusterFeatureRegistration, KubeObjectDetailRegistration, KubeObjectMenuRegistration, KubeObjectStatusRegistration, PageMenuRegistration, PageRegistration, StatusBarRegistration, } from "./registries";
import { observable } from "mobx";
import { LensExtension } from "./lens-extension"
import { getExtensionPageUrl } from "./registries/page-registry"
import { LensExtension } from "./lens-extension";
import { getExtensionPageUrl } from "./registries/page-registry";

export class LensRendererExtension extends LensExtension {
@observable.shallow globalPages: PageRegistration[] = []
@ -1,8 +1,8 @@
import { getExtensionPageUrl, globalPageRegistry, PageRegistration } from "../page-registry"
import { LensExtension } from "../../lens-extension"
import { getExtensionPageUrl, globalPageRegistry, PageRegistration } from "../page-registry";
import { LensExtension } from "../../lens-extension";
import React from "react";

let ext: LensExtension = null
let ext: LensExtension = null;

describe("getPageUrl", () => {
beforeEach(async () => {
@ -14,25 +14,25 @@ describe("getPageUrl", () => {
manifestPath: "/this/is/fake/package.json",
isBundled: false,
isEnabled: true
})
})
});
});

it("returns a page url for extension", () => {
expect(getExtensionPageUrl({ extensionId: ext.name })).toBe("/extension/foo-bar")
})
expect(getExtensionPageUrl({ extensionId: ext.name })).toBe("/extension/foo-bar");
});

it("allows to pass base url as parameter", () => {
expect(getExtensionPageUrl({ extensionId: ext.name, pageId: "/test" })).toBe("/extension/foo-bar/test")
})
expect(getExtensionPageUrl({ extensionId: ext.name, pageId: "/test" })).toBe("/extension/foo-bar/test");
});

it("removes @", () => {
expect(getExtensionPageUrl({ extensionId: "@foo/bar" })).toBe("/extension/foo-bar")
})
expect(getExtensionPageUrl({ extensionId: "@foo/bar" })).toBe("/extension/foo-bar");
});

it("adds / prefix", () => {
expect(getExtensionPageUrl({ extensionId: ext.name, pageId: "test" })).toBe("/extension/foo-bar/test")
})
})
expect(getExtensionPageUrl({ extensionId: ext.name, pageId: "test" })).toBe("/extension/foo-bar/test");
});
});

describe("globalPageRegistry", () => {
beforeEach(async () => {
@ -44,7 +44,7 @@ describe("globalPageRegistry", () => {
manifestPath: "/this/is/fake/package.json",
isBundled: false,
isEnabled: true
})
});
globalPageRegistry.add([
{
id: "test-page",
@ -63,12 +63,12 @@ describe("globalPageRegistry", () => {
Page: () => React.createElement('Default')
}
},
], ext)
})
], ext);
});

describe("getByPageMenuTarget", () => {
it("matching to first registered page without id", () => {
const page = globalPageRegistry.getByPageMenuTarget({ extensionId: ext.name })
const page = globalPageRegistry.getByPageMenuTarget({ extensionId: ext.name });
expect(page.id).toEqual(undefined);
expect(page.extensionId).toEqual(ext.name);
expect(page.routePath).toEqual(getExtensionPageUrl({ extensionId: ext.name }));
@ -78,16 +78,16 @@ describe("globalPageRegistry", () => {
const page = globalPageRegistry.getByPageMenuTarget({
pageId: "test-page",
extensionId: ext.name
})
expect(page.id).toEqual("test-page")
})
});
expect(page.id).toEqual("test-page");
});

it("returns null if target not found", () => {
const page = globalPageRegistry.getByPageMenuTarget({
pageId: "wrong-page",
extensionId: ext.name
})
expect(page).toBeNull()
})
})
})
});
expect(page).toBeNull();
});
});
});

@ -1,4 +1,4 @@
import type React from "react"
import type React from "react";
import { BaseRegistry } from "./base-registry";

export interface AppPreferenceComponents {
@ -14,4 +14,4 @@ export interface AppPreferenceRegistration {
export class AppPreferenceRegistry extends BaseRegistry<AppPreferenceRegistration> {
}

export const appPreferenceRegistry = new AppPreferenceRegistry()
export const appPreferenceRegistry = new AppPreferenceRegistry();

@ -12,7 +12,7 @@ export class BaseRegistry<T = object, I extends T = T> {
add(items: T | T[], ext?: LensExtension): () => void; // allow method overloading with required "ext"
@action
add(items: T | T[]) {
const normalizedItems = (Array.isArray(items) ? items : [items])
const normalizedItems = (Array.isArray(items) ? items : [items]);
this.items.push(...normalizedItems);
return () => this.remove(...normalizedItems);
}
@ -21,6 +21,6 @@ export class BaseRegistry<T = object, I extends T = T> {
remove(...items: T[]) {
items.forEach(item => {
this.items.remove(item); // works because of {deep: false};
})
});
}
}

@ -1,4 +1,4 @@
import type React from "react"
import type React from "react";
import { BaseRegistry } from "./base-registry";
import { ClusterFeature } from "../cluster-feature";

@ -15,4 +15,4 @@ export interface ClusterFeatureRegistration {
export class ClusterFeatureRegistry extends BaseRegistry<ClusterFeatureRegistration> {
}

export const clusterFeatureRegistry = new ClusterFeatureRegistry()
export const clusterFeatureRegistry = new ClusterFeatureRegistry();
@ -1,11 +1,11 @@
// All registries managed by extensions api

export * from "./page-registry"
export * from "./page-menu-registry"
export * from "./menu-registry"
export * from "./app-preference-registry"
export * from "./status-bar-registry"
export * from "./page-registry";
export * from "./page-menu-registry";
export * from "./menu-registry";
export * from "./app-preference-registry";
export * from "./status-bar-registry";
export * from "./kube-object-detail-registry";
export * from "./kube-object-menu-registry";
export * from "./cluster-feature-registry"
export * from "./kube-object-status-registry"
export * from "./cluster-feature-registry";
export * from "./kube-object-status-registry";

@ -1,4 +1,4 @@
import React from "react"
import React from "react";
import { BaseRegistry } from "./base-registry";

export interface KubeObjectDetailComponents {
@ -15,15 +15,15 @@ export interface KubeObjectDetailRegistration {
export class KubeObjectDetailRegistry extends BaseRegistry<KubeObjectDetailRegistration> {
getItemsForKind(kind: string, apiVersion: string) {
const items = this.getItems().filter((item) => {
return item.kind === kind && item.apiVersions.includes(apiVersion)
return item.kind === kind && item.apiVersions.includes(apiVersion);
}).map((item) => {
if (item.priority === null) {
item.priority = 50
item.priority = 50;
}
return item
})
return items.sort((a, b) => b.priority - a.priority)
return item;
});
return items.sort((a, b) => b.priority - a.priority);
}
}

export const kubeObjectDetailRegistry = new KubeObjectDetailRegistry()
export const kubeObjectDetailRegistry = new KubeObjectDetailRegistry();

@ -1,4 +1,4 @@
import React from "react"
import React from "react";
import { BaseRegistry } from "./base-registry";

export interface KubeObjectMenuComponents {
@ -14,9 +14,9 @@ export interface KubeObjectMenuRegistration {
export class KubeObjectMenuRegistry extends BaseRegistry<KubeObjectMenuRegistration> {
getItemsForKind(kind: string, apiVersion: string) {
return this.getItems().filter((item) => {
return item.kind === kind && item.apiVersions.includes(apiVersion)
})
return item.kind === kind && item.apiVersions.includes(apiVersion);
});
}
}

export const kubeObjectMenuRegistry = new KubeObjectMenuRegistry()
export const kubeObjectMenuRegistry = new KubeObjectMenuRegistry();

@ -10,8 +10,8 @@ export interface KubeObjectStatusRegistration {
export class KubeObjectStatusRegistry extends BaseRegistry<KubeObjectStatusRegistration> {
getItemsForKind(kind: string, apiVersion: string) {
return this.getItems().filter((item) => {
return item.kind === kind && item.apiVersions.includes(apiVersion)
})
return item.kind === kind && item.apiVersions.includes(apiVersion);
});
}
}

@ -29,8 +29,8 @@ export class PageMenuRegistry extends BaseRegistry<PageMenuRegistration, Require
extensionId: ext.name,
...(menuItem.target || {}),
};
return menuItem
})
return menuItem;
});
return super.add(normalizedItems);
}
}

@ -45,7 +45,7 @@ export interface PageComponents {
}

export function sanitizeExtensionName(name: string) {
return name.replace("@", "").replace("/", "-")
return name.replace("@", "").replace("/", "-");
}

export function getExtensionPageUrl<P extends object>({ extensionId, pageId = "", params }: PageMenuTarget<P>): string {
@ -68,13 +68,13 @@ export class PageRegistry extends BaseRegistry<PageRegistration, RegisteredPage>
...page,
extensionId: ext.name,
routePath: getExtensionPageUrl({ extensionId: ext.name, pageId: page.id ?? page.routePath }),
}))
}));
} catch (err) {
logger.error(`[EXTENSION]: page-registration failed`, {
items,
extension: ext,
error: String(err),
})
});
}
return super.add(registeredPages);
}
@ -1,36 +1,36 @@
// Common UI components

// layouts
export * from "../../renderer/components/layout/page-layout"
export * from "../../renderer/components/layout/wizard-layout"
export * from "../../renderer/components/layout/tab-layout"
export * from "../../renderer/components/layout/page-layout";
export * from "../../renderer/components/layout/wizard-layout";
export * from "../../renderer/components/layout/tab-layout";

// form-controls
export * from "../../renderer/components/button"
export * from "../../renderer/components/checkbox"
export * from "../../renderer/components/radio"
export * from "../../renderer/components/select"
export * from "../../renderer/components/slider"
export * from "../../renderer/components/input/input"
export * from "../../renderer/components/button";
export * from "../../renderer/components/checkbox";
export * from "../../renderer/components/radio";
export * from "../../renderer/components/select";
export * from "../../renderer/components/slider";
export * from "../../renderer/components/input/input";

// other components
export * from "../../renderer/components/icon"
export * from "../../renderer/components/tooltip"
export * from "../../renderer/components/tabs"
export * from "../../renderer/components/table"
export * from "../../renderer/components/badge"
export * from "../../renderer/components/drawer"
export * from "../../renderer/components/dialog"
export * from "../../renderer/components/icon";
export * from "../../renderer/components/tooltip";
export * from "../../renderer/components/tabs";
export * from "../../renderer/components/table";
export * from "../../renderer/components/badge";
export * from "../../renderer/components/drawer";
export * from "../../renderer/components/dialog";
export * from "../../renderer/components/confirm-dialog";
export * from "../../renderer/components/line-progress"
export * from "../../renderer/components/menu"
export * from "../../renderer/components/notifications"
export * from "../../renderer/components/spinner"
export * from "../../renderer/components/stepper"
export * from "../../renderer/components/line-progress";
export * from "../../renderer/components/menu";
export * from "../../renderer/components/notifications";
export * from "../../renderer/components/spinner";
export * from "../../renderer/components/stepper";

// kube helpers
export * from "../../renderer/components/kube-object"
export * from "../../renderer/components/+events/kube-event-details"
export * from "../../renderer/components/kube-object";
export * from "../../renderer/components/+events/kube-event-details";

// specific exports
export * from "../../renderer/components/status-brick";

@ -1,14 +1,14 @@
// Lens-extensions apis, required in renderer process runtime

// APIs
import * as Component from "./components"
import * as K8sApi from "./k8s-api"
import * as Navigation from "./navigation"
import * as Theme from "./theming"
import * as Component from "./components";
import * as K8sApi from "./k8s-api";
import * as Navigation from "./navigation";
import * as Theme from "./theming";

export {
Component,
K8sApi,
Navigation,
Theme,
}
};

@ -1,6 +1,6 @@
export { isAllowedResource } from "../../common/rbac"
export { isAllowedResource } from "../../common/rbac";
export { apiManager } from "../../renderer/api/api-manager";
export { KubeObjectStore } from "../../renderer/kube-object.store"
export { KubeObjectStore } from "../../renderer/kube-object.store";
export { KubeApi, forCluster, IKubeApiCluster } from "../../renderer/api/kube-api";
export { KubeObject } from "../../renderer/api/kube-object";
export { Pod, podsApi, PodsApi, IPodContainer, IPodContainerStatus } from "../../renderer/api/endpoints";
@ -31,33 +31,33 @@ export { RoleBinding, roleBindingApi } from "../../renderer/api/endpoints";
export { ClusterRole, clusterRoleApi } from "../../renderer/api/endpoints";
export { ClusterRoleBinding, clusterRoleBindingApi } from "../../renderer/api/endpoints";
export { CustomResourceDefinition, crdApi } from "../../renderer/api/endpoints";
export { KubeObjectStatus, KubeObjectStatusLevel } from "./kube-object-status"
export { KubeObjectStatus, KubeObjectStatusLevel } from "./kube-object-status";

// stores
export type { EventStore } from "../../renderer/components/+events/event.store"
export type { PodsStore } from "../../renderer/components/+workloads-pods/pods.store"
export type { NodesStore } from "../../renderer/components/+nodes/nodes.store"
export type { DeploymentStore } from "../../renderer/components/+workloads-deployments/deployments.store"
export type { DaemonSetStore } from "../../renderer/components/+workloads-daemonsets/daemonsets.store"
export type { StatefulSetStore } from "../../renderer/components/+workloads-statefulsets/statefulset.store"
export type { JobStore } from "../../renderer/components/+workloads-jobs/job.store"
export type { CronJobStore } from "../../renderer/components/+workloads-cronjobs/cronjob.store"
export type { ConfigMapsStore } from "../../renderer/components/+config-maps/config-maps.store"
export type { SecretsStore } from "../../renderer/components/+config-secrets/secrets.store"
export type { ReplicaSetStore } from "../../renderer/components/+workloads-replicasets/replicasets.store"
export type { ResourceQuotasStore } from "../../renderer/components/+config-resource-quotas/resource-quotas.store"
export type { HPAStore } from "../../renderer/components/+config-autoscalers/hpa.store"
export type { PodDisruptionBudgetsStore } from "../../renderer/components/+config-pod-disruption-budgets/pod-disruption-budgets.store"
export type { ServiceStore } from "../../renderer/components/+network-services/services.store"
export type { EndpointStore } from "../../renderer/components/+network-endpoints/endpoints.store"
export type { IngressStore } from "../../renderer/components/+network-ingresses/ingress.store"
export type { NetworkPolicyStore } from "../../renderer/components/+network-policies/network-policy.store"
export type { PersistentVolumesStore } from "../../renderer/components/+storage-volumes/volumes.store"
export type { VolumeClaimStore } from "../../renderer/components/+storage-volume-claims/volume-claim.store"
export type { StorageClassStore } from "../../renderer/components/+storage-classes/storage-class.store"
export type { NamespaceStore } from "../../renderer/components/+namespaces/namespace.store"
export type { ServiceAccountsStore } from "../../renderer/components/+user-management-service-accounts/service-accounts.store"
export type { RolesStore } from "../../renderer/components/+user-management-roles/roles.store"
export type { RoleBindingsStore } from "../../renderer/components/+user-management-roles-bindings/role-bindings.store"
export type { CRDStore } from "../../renderer/components/+custom-resources/crd.store"
export type { CRDResourceStore } from "../../renderer/components/+custom-resources/crd-resource.store"
export type { EventStore } from "../../renderer/components/+events/event.store";
export type { PodsStore } from "../../renderer/components/+workloads-pods/pods.store";
export type { NodesStore } from "../../renderer/components/+nodes/nodes.store";
export type { DeploymentStore } from "../../renderer/components/+workloads-deployments/deployments.store";
export type { DaemonSetStore } from "../../renderer/components/+workloads-daemonsets/daemonsets.store";
export type { StatefulSetStore } from "../../renderer/components/+workloads-statefulsets/statefulset.store";
export type { JobStore } from "../../renderer/components/+workloads-jobs/job.store";
export type { CronJobStore } from "../../renderer/components/+workloads-cronjobs/cronjob.store";
export type { ConfigMapsStore } from "../../renderer/components/+config-maps/config-maps.store";
export type { SecretsStore } from "../../renderer/components/+config-secrets/secrets.store";
export type { ReplicaSetStore } from "../../renderer/components/+workloads-replicasets/replicasets.store";
export type { ResourceQuotasStore } from "../../renderer/components/+config-resource-quotas/resource-quotas.store";
export type { HPAStore } from "../../renderer/components/+config-autoscalers/hpa.store";
export type { PodDisruptionBudgetsStore } from "../../renderer/components/+config-pod-disruption-budgets/pod-disruption-budgets.store";
export type { ServiceStore } from "../../renderer/components/+network-services/services.store";
export type { EndpointStore } from "../../renderer/components/+network-endpoints/endpoints.store";
export type { IngressStore } from "../../renderer/components/+network-ingresses/ingress.store";
export type { NetworkPolicyStore } from "../../renderer/components/+network-policies/network-policy.store";
export type { PersistentVolumesStore } from "../../renderer/components/+storage-volumes/volumes.store";
export type { VolumeClaimStore } from "../../renderer/components/+storage-volume-claims/volume-claim.store";
export type { StorageClassStore } from "../../renderer/components/+storage-classes/storage-class.store";
export type { NamespaceStore } from "../../renderer/components/+namespaces/namespace.store";
export type { ServiceAccountsStore } from "../../renderer/components/+user-management-service-accounts/service-accounts.store";
export type { RolesStore } from "../../renderer/components/+user-management-roles/roles.store";
export type { RoleBindingsStore } from "../../renderer/components/+user-management-roles-bindings/role-bindings.store";
export type { CRDStore } from "../../renderer/components/+custom-resources/crd.store";
export type { CRDResourceStore } from "../../renderer/components/+custom-resources/crd-resource.store";

@ -1,3 +1,3 @@
export { navigate } from "../../renderer/navigation";
export { hideDetails, showDetails, getDetailsUrl } from "../../renderer/navigation"
export { hideDetails, showDetails, getDetailsUrl } from "../../renderer/navigation";
export { IURLParams } from "../../common/utils/buildUrl";
@ -1,4 +1,4 @@
|
||||
|
||||
import fetchMock from "jest-fetch-mock"
import fetchMock from "jest-fetch-mock";
// rewire global.fetch to call 'fetchMock'
fetchMock.enableMocks();

@ -21,35 +21,35 @@ jest.mock("winston", () => ({
Console: jest.fn(),
File: jest.fn(),
}
}))
}));

jest.mock("../../common/ipc")
jest.mock("../context-handler")
jest.mock("request")
jest.mock("request-promise-native")
jest.mock("../../common/ipc");
jest.mock("../context-handler");
jest.mock("request");
jest.mock("request-promise-native");

import { Console } from "console";
import mockFs from "mock-fs";
import { workspaceStore } from "../../common/workspace-store";
import { Cluster } from "../cluster"
import { Cluster } from "../cluster";
import { ContextHandler } from "../context-handler";
import { getFreePort } from "../port";
import { V1ResourceAttributes } from "@kubernetes/client-node";
import { apiResources } from "../../common/rbac";
import request from "request-promise-native"
import request from "request-promise-native";
import { Kubectl } from "../kubectl";

const mockedRequest = request as jest.MockedFunction<typeof request>
const mockedRequest = request as jest.MockedFunction<typeof request>;

console = new Console(process.stdout, process.stderr) // fix mockFS
console = new Console(process.stdout, process.stderr); // fix mockFS

describe("create clusters", () => {
beforeEach(() => {
jest.clearAllMocks()
})
jest.clearAllMocks();
});

let c: Cluster
let c: Cluster;

beforeEach(() => {
const mockOpts = {
@ -74,68 +74,68 @@ describe("create clusters", () => {
kind: "Config",
preferences: {},
})
}
mockFs(mockOpts)
jest.spyOn(Kubectl.prototype, "ensureKubectl").mockReturnValue(Promise.resolve(true))
};
mockFs(mockOpts);
jest.spyOn(Kubectl.prototype, "ensureKubectl").mockReturnValue(Promise.resolve(true));
c = new Cluster({
id: "foo",
contextName: "minikube",
kubeConfigPath: "minikube-config.yml",
workspace: workspaceStore.currentWorkspaceId
})
})
});
});

afterEach(() => {
mockFs.restore()
})
mockFs.restore();
});

it("should be able to create a cluster from a cluster model and apiURL should be decoded", () => {
expect(c.apiUrl).toBe("https://192.168.64.3:8443")
})
expect(c.apiUrl).toBe("https://192.168.64.3:8443");
});

it("reconnect should not throw if contextHandler is missing", () => {
expect(() => c.reconnect()).not.toThrowError()
})
expect(() => c.reconnect()).not.toThrowError();
});

it("disconnect should not throw if contextHandler is missing", () => {
expect(() => c.disconnect()).not.toThrowError()
})
expect(() => c.disconnect()).not.toThrowError();
});

it("init should not throw if everything is in order", async () => {
await c.init(await getFreePort())
await c.init(await getFreePort());
expect(logger.info).toBeCalledWith(expect.stringContaining("init success"), {
id: "foo",
apiUrl: "https://192.168.64.3:8443",
context: "minikube",
})
})
});
});

it("activating cluster should try to connect to cluster and do a refresh", async () => {
const port = await getFreePort()
const port = await getFreePort();
jest.spyOn(ContextHandler.prototype, "ensureServer");

const mockListNSs = jest.fn()
const mockListNSs = jest.fn();
const mockKC = {
makeApiClient() {
return {
listNamespace: mockListNSs,
}
};
}
}
jest.spyOn(Cluster.prototype, "isClusterAdmin").mockReturnValue(Promise.resolve(true))
};
jest.spyOn(Cluster.prototype, "isClusterAdmin").mockReturnValue(Promise.resolve(true));
jest.spyOn(Cluster.prototype, "canI")
.mockImplementationOnce((attr: V1ResourceAttributes): Promise<boolean> => {
expect(attr.namespace).toBe("default")
expect(attr.resource).toBe("pods")
expect(attr.verb).toBe("list")
return Promise.resolve(true)
expect(attr.namespace).toBe("default");
expect(attr.resource).toBe("pods");
expect(attr.verb).toBe("list");
return Promise.resolve(true);
})
.mockImplementation((attr: V1ResourceAttributes): Promise<boolean> => {
expect(attr.namespace).toBe("default")
expect(attr.verb).toBe("list")
return Promise.resolve(true)
})
jest.spyOn(Cluster.prototype, "getProxyKubeconfig").mockReturnValue(mockKC as any)
expect(attr.namespace).toBe("default");
expect(attr.verb).toBe("list");
return Promise.resolve(true);
});
jest.spyOn(Cluster.prototype, "getProxyKubeconfig").mockReturnValue(mockKC as any);
mockListNSs.mockImplementationOnce(() => ({
body: {
items: [{
@ -144,36 +144,36 @@ describe("create clusters", () => {
}
}]
}
}))
}));

mockedRequest.mockImplementationOnce(((uri: any, _options: any) => {
expect(uri).toBe(`http://localhost:${port}/api-kube/version`)
return Promise.resolve({ gitVersion: "1.2.3" })
}) as any)
expect(uri).toBe(`http://localhost:${port}/api-kube/version`);
return Promise.resolve({ gitVersion: "1.2.3" });
}) as any);

const c = new class extends Cluster {
// only way to mock protected methods, without these we leak promises
protected bindEvents() {
return
return;
}
protected async ensureKubectl() {
return Promise.resolve(true)
return Promise.resolve(true);
}
}({
id: "foo",
contextName: "minikube",
kubeConfigPath: "minikube-config.yml",
workspace: workspaceStore.currentWorkspaceId
})
await c.init(port)
await c.activate()
});
await c.init(port);
await c.activate();

expect(ContextHandler.prototype.ensureServer).toBeCalled()
expect(mockedRequest).toBeCalled()
expect(c.accessible).toBe(true)
expect(c.allowedNamespaces.length).toBe(1)
expect(c.allowedResources.length).toBe(apiResources.length)
c.disconnect()
jest.resetAllMocks()
})
})
expect(ContextHandler.prototype.ensureServer).toBeCalled();
expect(mockedRequest).toBeCalled();
expect(c.accessible).toBe(true);
expect(c.allowedNamespaces.length).toBe(1);
expect(c.allowedResources.length).toBe(apiResources.length);
c.disconnect();
jest.resetAllMocks();
});
});

@ -21,109 +21,109 @@ jest.mock("winston", () => ({
Console: jest.fn(),
File: jest.fn(),
}
}))
}));

jest.mock("../../common/ipc")
jest.mock("child_process")
jest.mock("tcp-port-used")
jest.mock("../../common/ipc");
jest.mock("child_process");
jest.mock("tcp-port-used");

import { Cluster } from "../cluster"
import { KubeAuthProxy } from "../kube-auth-proxy"
import { getFreePort } from "../port"
import { broadcastMessage } from "../../common/ipc"
import { ChildProcess, spawn, SpawnOptions } from "child_process"
import { bundledKubectlPath, Kubectl } from "../kubectl"
import { Cluster } from "../cluster";
import { KubeAuthProxy } from "../kube-auth-proxy";
import { getFreePort } from "../port";
import { broadcastMessage } from "../../common/ipc";
import { ChildProcess, spawn, SpawnOptions } from "child_process";
import { bundledKubectlPath, Kubectl } from "../kubectl";
import { mock, MockProxy } from 'jest-mock-extended';
import { waitUntilUsed } from 'tcp-port-used';
import { Readable } from "stream"
import { Readable } from "stream";

const mockBroadcastIpc = broadcastMessage as jest.MockedFunction<typeof broadcastMessage>
const mockSpawn = spawn as jest.MockedFunction<typeof spawn>
const mockWaitUntilUsed = waitUntilUsed as jest.MockedFunction<typeof waitUntilUsed>
const mockBroadcastIpc = broadcastMessage as jest.MockedFunction<typeof broadcastMessage>;
const mockSpawn = spawn as jest.MockedFunction<typeof spawn>;
const mockWaitUntilUsed = waitUntilUsed as jest.MockedFunction<typeof waitUntilUsed>;

describe("kube auth proxy tests", () => {
beforeEach(() => {
jest.clearAllMocks()
})
jest.clearAllMocks();
});

it("calling exit multiple times shouldn't throw", async () => {
const port = await getFreePort()
const kap = new KubeAuthProxy(new Cluster({ id: "foobar", kubeConfigPath: "fake-path.yml" }), port, {})
kap.exit()
kap.exit()
kap.exit()
})
const port = await getFreePort();
const kap = new KubeAuthProxy(new Cluster({ id: "foobar", kubeConfigPath: "fake-path.yml" }), port, {});
kap.exit();
kap.exit();
kap.exit();
});

describe("spawn tests", () => {
let port: number
let mockedCP: MockProxy<ChildProcess>
let listeners: Record<string, (...args: any[]) => void>
let proxy: KubeAuthProxy
let port: number;
let mockedCP: MockProxy<ChildProcess>;
let listeners: Record<string, (...args: any[]) => void>;
let proxy: KubeAuthProxy;

beforeEach(async () => {
port = await getFreePort()
mockedCP = mock<ChildProcess>()
listeners = {}
port = await getFreePort();
mockedCP = mock<ChildProcess>();
listeners = {};

jest.spyOn(Kubectl.prototype, "checkBinary").mockReturnValueOnce(Promise.resolve(true))
jest.spyOn(Kubectl.prototype, "ensureKubectl").mockReturnValueOnce(Promise.resolve(false))
jest.spyOn(Kubectl.prototype, "checkBinary").mockReturnValueOnce(Promise.resolve(true));
jest.spyOn(Kubectl.prototype, "ensureKubectl").mockReturnValueOnce(Promise.resolve(false));
mockedCP.on.mockImplementation((event: string, listener: (message: any, sendHandle: any) => void): ChildProcess => {
listeners[event] = listener
return mockedCP
})
mockedCP.stderr = mock<Readable>()
listeners[event] = listener;
return mockedCP;
});
mockedCP.stderr = mock<Readable>();
mockedCP.stderr.on.mockImplementation((event: string, listener: (message: any, sendHandle: any) => void): Readable => {
listeners[`stderr/${event}`] = listener
return mockedCP.stderr
})
mockedCP.stdout = mock<Readable>()
listeners[`stderr/${event}`] = listener;
return mockedCP.stderr;
});
mockedCP.stdout = mock<Readable>();
mockedCP.stdout.on.mockImplementation((event: string, listener: (message: any, sendHandle: any) => void): Readable => {
listeners[`stdout/${event}`] = listener
return mockedCP.stdout
})
listeners[`stdout/${event}`] = listener;
return mockedCP.stdout;
});
mockSpawn.mockImplementationOnce((command: string, args: readonly string[], options: SpawnOptions): ChildProcess => {
expect(command).toBe(bundledKubectlPath())
return mockedCP
})
mockWaitUntilUsed.mockReturnValueOnce(Promise.resolve())
const cluster = new Cluster({ id: "foobar", kubeConfigPath: "fake-path.yml" })
jest.spyOn(cluster, "apiUrl", "get").mockReturnValue("https://fake.k8s.internal")
proxy = new KubeAuthProxy(cluster, port, {})
})
expect(command).toBe(bundledKubectlPath());
return mockedCP;
});
mockWaitUntilUsed.mockReturnValueOnce(Promise.resolve());
const cluster = new Cluster({ id: "foobar", kubeConfigPath: "fake-path.yml" });
jest.spyOn(cluster, "apiUrl", "get").mockReturnValue("https://fake.k8s.internal");
proxy = new KubeAuthProxy(cluster, port, {});
});

it("should call spawn and broadcast errors", async () => {
await proxy.run()
listeners["error"]({ message: "foobarbat" })
await proxy.run();
listeners["error"]({ message: "foobarbat" });

expect(mockBroadcastIpc).toBeCalledWith("kube-auth:foobar", { data: "foobarbat", error: true })
})
expect(mockBroadcastIpc).toBeCalledWith("kube-auth:foobar", { data: "foobarbat", error: true });
});

it("should call spawn and broadcast exit", async () => {
await proxy.run()
listeners["exit"](0)
await proxy.run();
listeners["exit"](0);

expect(mockBroadcastIpc).toBeCalledWith("kube-auth:foobar", { data: "proxy exited with code: 0", error: false })
})
expect(mockBroadcastIpc).toBeCalledWith("kube-auth:foobar", { data: "proxy exited with code: 0", error: false });
});

it("should call spawn and broadcast errors from stderr", async () => {
await proxy.run()
listeners["stderr/data"]("an error")
await proxy.run();
listeners["stderr/data"]("an error");

expect(mockBroadcastIpc).toBeCalledWith("kube-auth:foobar", { data: "an error", error: true })
})
expect(mockBroadcastIpc).toBeCalledWith("kube-auth:foobar", { data: "an error", error: true });
});

it("should call spawn and broadcast stdout serving info", async () => {
await proxy.run()
listeners["stdout/data"]("Starting to serve on")
await proxy.run();
listeners["stdout/data"]("Starting to serve on");

expect(mockBroadcastIpc).toBeCalledWith("kube-auth:foobar", { data: "Authentication proxy started\n" })
})
expect(mockBroadcastIpc).toBeCalledWith("kube-auth:foobar", { data: "Authentication proxy started\n" });
});

it("should call spawn and broadcast stdout other info", async () => {
await proxy.run()
listeners["stdout/data"]("some info")
await proxy.run();
listeners["stdout/data"]("some info");

expect(mockBroadcastIpc).toBeCalledWith("kube-auth:foobar", { data: "some info" })
})
})
})
expect(mockBroadcastIpc).toBeCalledWith("kube-auth:foobar", { data: "some info" });
});
});
});

@ -21,24 +21,24 @@ jest.mock("winston", () => ({
Console: jest.fn(),
File: jest.fn(),
}
}))
}));

import { KubeconfigManager } from "../kubeconfig-manager"
import mockFs from "mock-fs"
import { KubeconfigManager } from "../kubeconfig-manager";
import mockFs from "mock-fs";
import { Cluster } from "../cluster";
import { workspaceStore } from "../../common/workspace-store";
import { ContextHandler } from "../context-handler";
import { getFreePort } from "../port";
import fse from "fs-extra"
import fse from "fs-extra";
import { loadYaml } from "@kubernetes/client-node";
import { Console } from "console";

console = new Console(process.stdout, process.stderr) // fix mockFS
console = new Console(process.stdout, process.stderr); // fix mockFS

describe("kubeconfig manager tests", () => {
beforeEach(() => {
jest.clearAllMocks()
})
jest.clearAllMocks();
});

beforeEach(() => {
const mockOpts = {
@ -63,13 +63,13 @@ describe("kubeconfig manager tests", () => {
kind: "Config",
preferences: {},
})
}
mockFs(mockOpts)
})
};
mockFs(mockOpts);
});

afterEach(() => {
mockFs.restore()
})
mockFs.restore();
});

it("should create 'temp' kube config with proxy", async () => {
const cluster = new Cluster({
@ -77,19 +77,19 @@ describe("kubeconfig manager tests", () => {
contextName: "minikube",
kubeConfigPath: "minikube-config.yml",
workspace: workspaceStore.currentWorkspaceId
})
const contextHandler = new ContextHandler(cluster)
const port = await getFreePort()
const kubeConfManager = await KubeconfigManager.create(cluster, contextHandler, port)
});
const contextHandler = new ContextHandler(cluster);
const port = await getFreePort();
const kubeConfManager = await KubeconfigManager.create(cluster, contextHandler, port);

expect(logger.error).not.toBeCalled()
expect(kubeConfManager.getPath()).toBe("tmp/kubeconfig-foo")
const file = await fse.readFile(kubeConfManager.getPath())
const yml = loadYaml<any>(file.toString())
expect(yml["current-context"]).toBe("minikube")
expect(yml["clusters"][0]["cluster"]["server"]).toBe(`http://127.0.0.1:${port}/foo`)
expect(yml["users"][0]["name"]).toBe("proxy")
})
expect(logger.error).not.toBeCalled();
expect(kubeConfManager.getPath()).toBe("tmp/kubeconfig-foo");
const file = await fse.readFile(kubeConfManager.getPath());
const yml = loadYaml<any>(file.toString());
expect(yml["current-context"]).toBe("minikube");
expect(yml["clusters"][0]["cluster"]["server"]).toBe(`http://127.0.0.1:${port}/foo`);
expect(yml["users"][0]["name"]).toBe("proxy");
});

it("should remove 'temp' kube config on unlink and remove reference from inside class", async () => {
const cluster = new Cluster({
@ -97,16 +97,16 @@ describe("kubeconfig manager tests", () => {
contextName: "minikube",
kubeConfigPath: "minikube-config.yml",
workspace: workspaceStore.currentWorkspaceId
})
const contextHandler = new ContextHandler(cluster)
const port = await getFreePort()
const kubeConfManager = await KubeconfigManager.create(cluster, contextHandler, port)
});
const contextHandler = new ContextHandler(cluster);
const port = await getFreePort();
const kubeConfManager = await KubeconfigManager.create(cluster, contextHandler, port);

const configPath = kubeConfManager.getPath()
expect(await fse.pathExists(configPath)).toBe(true)
await kubeConfManager.unlink()
expect(await fse.pathExists(configPath)).toBe(false)
await kubeConfManager.unlink() // doesn't throw
expect(kubeConfManager.getPath()).toBeUndefined()
})
})
const configPath = kubeConfManager.getPath();
expect(await fse.pathExists(configPath)).toBe(true);
await kubeConfManager.unlink();
expect(await fse.pathExists(configPath)).toBe(false);
await kubeConfManager.unlink(); // doesn't throw
expect(kubeConfManager.getPath()).toBeUndefined();
});
});

@ -1,19 +1,19 @@
import { autoUpdater } from "electron-updater"
import logger from "./logger"
import { autoUpdater } from "electron-updater";
import logger from "./logger";

export class AppUpdater {
static readonly defaultUpdateIntervalMs = 1000 * 60 * 60 * 24 // once a day

static checkForUpdates() {
return autoUpdater.checkForUpdatesAndNotify()
return autoUpdater.checkForUpdatesAndNotify();
}

constructor(protected updateInterval = AppUpdater.defaultUpdateIntervalMs) {
autoUpdater.logger = logger
autoUpdater.logger = logger;
}

public start() {
setInterval(AppUpdater.checkForUpdates, this.updateInterval)
setInterval(AppUpdater.checkForUpdates, this.updateInterval);
return AppUpdater.checkForUpdates();
}
}

@ -1,4 +1,4 @@
import request, { RequestPromiseOptions } from "request-promise-native"
import request, { RequestPromiseOptions } from "request-promise-native";
import { Cluster } from "../cluster";

export type ClusterDetectionResult = {
@ -11,11 +11,11 @@ export class BaseClusterDetector {
key: string

constructor(cluster: Cluster) {
this.cluster = cluster
this.cluster = cluster;
}

detect(): Promise<ClusterDetectionResult> {
return null
return null;
}

protected async k8sRequest<T = any>(path: string, options: RequestPromiseOptions = {}): Promise<T> {
@ -28,6 +28,6 @@ export class BaseClusterDetector {
Host: `${this.cluster.id}.${new URL(this.cluster.kubeProxyUrl).host}`, // required in ClusterManager.getClusterForRequest()
...(options.headers || {}),
},
})
});
}
}
@ -1,23 +1,23 @@
import { BaseClusterDetector } from "./base-cluster-detector";
import { createHash } from "crypto"
import { createHash } from "crypto";
import { ClusterMetadataKey } from "../cluster";

export class ClusterIdDetector extends BaseClusterDetector {
key = ClusterMetadataKey.CLUSTER_ID

public async detect() {
let id: string
let id: string;
try {
id = await this.getDefaultNamespaceId()
id = await this.getDefaultNamespaceId();
} catch(_) {
id = this.cluster.apiUrl
id = this.cluster.apiUrl;
}
const value = createHash("sha256").update(id).digest("hex")
return { value: value, accuracy: 100 }
const value = createHash("sha256").update(id).digest("hex");
return { value: value, accuracy: 100 };
}

protected async getDefaultNamespaceId() {
const response = await this.k8sRequest("/api/v1/namespaces/default")
return response.metadata.uid
const response = await this.k8sRequest("/api/v1/namespaces/default");
return response.metadata.uid;
}
}
@ -12,34 +12,34 @@ export class DetectorRegistry {
registry = observable.array<typeof BaseClusterDetector>([], { deep: false });

add(detectorClass: typeof BaseClusterDetector) {
this.registry.push(detectorClass)
this.registry.push(detectorClass);
}

async detectForCluster(cluster: Cluster): Promise<ClusterMetadata> {
const results: {[key: string]: ClusterDetectionResult } = {}
const results: {[key: string]: ClusterDetectionResult } = {};
for (const detectorClass of this.registry) {
const detector = new detectorClass(cluster)
const detector = new detectorClass(cluster);
try {
const data = await detector.detect()
const data = await detector.detect();
if (!data) continue;
const existingValue = results[detector.key]
const existingValue = results[detector.key];
if (existingValue && existingValue.accuracy > data.accuracy) continue; // previous value exists and is more accurate
results[detector.key] = data
results[detector.key] = data;
} catch (e) {
// detector raised error, do nothing
}
}
const metadata: ClusterMetadata = {}
const metadata: ClusterMetadata = {};
for (const [key, result] of Object.entries(results)) {
metadata[key] = result.value
metadata[key] = result.value;
}
return metadata
return metadata;
}
}

export const detectorRegistry = new DetectorRegistry()
detectorRegistry.add(ClusterIdDetector)
detectorRegistry.add(LastSeenDetector)
detectorRegistry.add(VersionDetector)
detectorRegistry.add(DistributionDetector)
detectorRegistry.add(NodesCountDetector)
export const detectorRegistry = new DetectorRegistry();
detectorRegistry.add(ClusterIdDetector);
detectorRegistry.add(LastSeenDetector);
detectorRegistry.add(VersionDetector);
detectorRegistry.add(DistributionDetector);
detectorRegistry.add(NodesCountDetector);
@ -6,75 +6,75 @@ export class DistributionDetector extends BaseClusterDetector {
version: string

public async detect() {
this.version = await this.getKubernetesVersion()
this.version = await this.getKubernetesVersion();
if (await this.isRancher()) {
return { value: "rancher", accuracy: 80}
return { value: "rancher", accuracy: 80};
}
if (this.isGKE()) {
return { value: "gke", accuracy: 80}
return { value: "gke", accuracy: 80};
}
if (this.isEKS()) {
return { value: "eks", accuracy: 80}
return { value: "eks", accuracy: 80};
}
if (this.isIKS()) {
return { value: "iks", accuracy: 80}
return { value: "iks", accuracy: 80};
}
if (this.isAKS()) {
return { value: "aks", accuracy: 80}
return { value: "aks", accuracy: 80};
}
if (this.isDigitalOcean()) {
return { value: "digitalocean", accuracy: 90}
return { value: "digitalocean", accuracy: 90};
}
if (this.isMinikube()) {
return { value: "minikube", accuracy: 80}
return { value: "minikube", accuracy: 80};
}
if (this.isCustom()) {
return { value: "custom", accuracy: 10}
return { value: "custom", accuracy: 10};
}
return { value: "unknown", accuracy: 10}
return { value: "unknown", accuracy: 10};
}

public async getKubernetesVersion() {
if (this.cluster.version) return this.cluster.version
if (this.cluster.version) return this.cluster.version;

const response = await this.k8sRequest("/version")
return response.gitVersion
const response = await this.k8sRequest("/version");
return response.gitVersion;
}

protected isGKE() {
return this.version.includes("gke")
return this.version.includes("gke");
}

protected isEKS() {
return this.version.includes("eks")
return this.version.includes("eks");
}

protected isIKS() {
return this.version.includes("IKS")
return this.version.includes("IKS");
}

protected isAKS() {
return this.cluster.apiUrl.endsWith("azmk8s.io")
return this.cluster.apiUrl.endsWith("azmk8s.io");
}

protected isDigitalOcean() {
return this.cluster.apiUrl.endsWith("k8s.ondigitalocean.com")
return this.cluster.apiUrl.endsWith("k8s.ondigitalocean.com");
}

protected isMinikube() {
return this.cluster.contextName.startsWith("minikube")
return this.cluster.contextName.startsWith("minikube");
}

protected isCustom() {
return this.version.includes("+")
return this.version.includes("+");
}

protected async isRancher() {
try {
const response = await this.k8sRequest("")
return response.data.find((api: any) => api?.apiVersion?.group === "meta.cattle.io") !== undefined
const response = await this.k8sRequest("");
return response.data.find((api: any) => api?.apiVersion?.group === "meta.cattle.io") !== undefined;
} catch (e) {
return false
return false;
}
}
}
@ -7,7 +7,7 @@ export class LastSeenDetector extends BaseClusterDetector {
public async detect() {
if (!this.cluster.accessible) return null;

await this.k8sRequest("/version")
return { value: new Date().toJSON(), accuracy: 100 }
await this.k8sRequest("/version");
return { value: new Date().toJSON(), accuracy: 100 };
}
}
@ -6,12 +6,12 @@ export class NodesCountDetector extends BaseClusterDetector {

public async detect() {
if (!this.cluster.accessible) return null;
const nodeCount = await this.getNodeCount()
return { value: nodeCount, accuracy: 100}
const nodeCount = await this.getNodeCount();
return { value: nodeCount, accuracy: 100};
}

protected async getNodeCount(): Promise<number> {
const response = await this.k8sRequest("/api/v1/nodes")
return response.items.length
const response = await this.k8sRequest("/api/v1/nodes");
return response.items.length;
}
}
@ -6,12 +6,12 @@ export class VersionDetector extends BaseClusterDetector {
value: string

public async detect() {
const version = await this.getKubernetesVersion()
return { value: version, accuracy: 100}
const version = await this.getKubernetesVersion();
return { value: version, accuracy: 100};
}

public async getKubernetesVersion() {
const response = await this.k8sRequest("/version")
return response.gitVersion
const response = await this.k8sRequest("/version");
return response.gitVersion;
}
}
@ -1,16 +1,16 @@
import "../common/cluster-ipc";
import type http from "http"
import { ipcMain } from "electron"
import type http from "http";
import { ipcMain } from "electron";
import { autorun } from "mobx";
import { clusterStore, getClusterIdFromHost } from "../common/cluster-store"
import { Cluster } from "./cluster"
import { clusterStore, getClusterIdFromHost } from "../common/cluster-store";
import { Cluster } from "./cluster";
import logger from "./logger";
import { apiKubePrefix } from "../common/vars";
import { Singleton } from "../common/utils";

export class ClusterManager extends Singleton {
constructor(public readonly port: number) {
super()
super();
// auto-init clusters
autorun(() => {
clusterStore.enabledClustersList.forEach(cluster => {
@ -34,52 +34,52 @@ export class ClusterManager extends Singleton {
delay: 250
});

ipcMain.on("network:offline", () => { this.onNetworkOffline() })
ipcMain.on("network:online", () => { this.onNetworkOnline() })
ipcMain.on("network:offline", () => { this.onNetworkOffline(); });
ipcMain.on("network:online", () => { this.onNetworkOnline(); });
}

protected onNetworkOffline() {
logger.info("[CLUSTER-MANAGER]: network is offline")
logger.info("[CLUSTER-MANAGER]: network is offline");
clusterStore.enabledClustersList.forEach((cluster) => {
if (!cluster.disconnected) {
cluster.online = false
cluster.accessible = false
cluster.refreshConnectionStatus().catch((e) => e)
cluster.online = false;
cluster.accessible = false;
cluster.refreshConnectionStatus().catch((e) => e);
}
})
});
}

protected onNetworkOnline() {
logger.info("[CLUSTER-MANAGER]: network is online")
logger.info("[CLUSTER-MANAGER]: network is online");
clusterStore.enabledClustersList.forEach((cluster) => {
if (!cluster.disconnected) {
cluster.refreshConnectionStatus().catch((e) => e)
cluster.refreshConnectionStatus().catch((e) => e);
}
})
});
}

stop() {
clusterStore.clusters.forEach((cluster: Cluster) => {
cluster.disconnect();
})
});
}

getClusterForRequest(req: http.IncomingMessage): Cluster {
let cluster: Cluster = null
let cluster: Cluster = null;

// lens-server is connecting to 127.0.0.1:<port>/<uid>
if (req.headers.host.startsWith("127.0.0.1")) {
const clusterId = req.url.split("/")[1]
cluster = clusterStore.getById(clusterId)
const clusterId = req.url.split("/")[1];
cluster = clusterStore.getById(clusterId);
if (cluster) {
// we need to swap path prefix so that request is proxied to kube api
req.url = req.url.replace(`/${clusterId}`, apiKubePrefix)
req.url = req.url.replace(`/${clusterId}`, apiKubePrefix);
}
} else if (req.headers["x-cluster-id"]) {
cluster = clusterStore.getById(req.headers["x-cluster-id"].toString())
cluster = clusterStore.getById(req.headers["x-cluster-id"].toString());
} else {
const clusterId = getClusterIdFromHost(req.headers.host);
cluster = clusterStore.getById(clusterId)
cluster = clusterStore.getById(clusterId);
}

return cluster;

@ -1,18 +1,18 @@
import { ipcMain } from "electron"
import type { ClusterId, ClusterMetadata, ClusterModel, ClusterPreferences } from "../common/cluster-store"
import { ipcMain } from "electron";
import type { ClusterId, ClusterMetadata, ClusterModel, ClusterPreferences } from "../common/cluster-store";
import type { IMetricsReqParams } from "../renderer/api/endpoints/metrics.api";
import type { WorkspaceId } from "../common/workspace-store";
import { action, computed, observable, reaction, toJS, when } from "mobx";
import { apiKubePrefix } from "../common/vars";
import { broadcastMessage } from "../common/ipc";
import { ContextHandler } from "./context-handler"
import { AuthorizationV1Api, CoreV1Api, KubeConfig, V1ResourceAttributes } from "@kubernetes/client-node"
import { ContextHandler } from "./context-handler";
import { AuthorizationV1Api, CoreV1Api, KubeConfig, V1ResourceAttributes } from "@kubernetes/client-node";
import { Kubectl } from "./kubectl";
import { KubeconfigManager } from "./kubeconfig-manager"
import { getNodeWarningConditions, loadConfig, podHasIssues } from "../common/kube-helpers"
import request, { RequestPromiseOptions } from "request-promise-native"
import { KubeconfigManager } from "./kubeconfig-manager";
import { getNodeWarningConditions, loadConfig, podHasIssues } from "../common/kube-helpers";
import request, { RequestPromiseOptions } from "request-promise-native";
import { apiResources } from "../common/rbac";
import logger from "./logger"
import logger from "./logger";
import { VersionDetector } from "./cluster-detectors/version-detector";
import { detectorRegistry } from "./cluster-detectors/detector-registry";

@ -86,23 +86,23 @@ export class Cluster implements ClusterModel, ClusterState {
}

@computed get name() {
return this.preferences.clusterName || this.contextName
return this.preferences.clusterName || this.contextName;
}

get version(): string {
return String(this.metadata?.version) || ""
return String(this.metadata?.version) || "";
}

constructor(model: ClusterModel) {
this.updateModel(model);
const kubeconfig = this.getKubeconfig()
const kubeconfig = this.getKubeconfig();
if (kubeconfig.getContextObject(this.contextName)) {
this.apiUrl = kubeconfig.getCluster(kubeconfig.getContextObject(this.contextName).cluster).server
this.apiUrl = kubeconfig.getCluster(kubeconfig.getContextObject(this.contextName).cluster).server;
}
}

get isManaged(): boolean {
return !!this.ownerRef
return !!this.ownerRef;
}

@action
@ -131,16 +131,16 @@ export class Cluster implements ClusterModel, ClusterState {
}

protected bindEvents() {
logger.info(`[CLUSTER]: bind events`, this.getMeta())
const refreshTimer = setInterval(() => !this.disconnected && this.refresh(), 30000) // every 30s
const refreshMetadataTimer = setInterval(() => !this.disconnected && this.refreshMetadata(), 900000) // every 15 minutes
logger.info(`[CLUSTER]: bind events`, this.getMeta());
const refreshTimer = setInterval(() => !this.disconnected && this.refresh(), 30000); // every 30s
const refreshMetadataTimer = setInterval(() => !this.disconnected && this.refreshMetadata(), 900000); // every 15 minutes

if (ipcMain) {
this.eventDisposers.push(
reaction(() => this.getState(), () => this.pushState()),
() => {
clearInterval(refreshTimer)
clearInterval(refreshMetadataTimer)
clearInterval(refreshTimer);
clearInterval(refreshMetadataTimer);
},
);
}
@ -165,20 +165,20 @@ export class Cluster implements ClusterModel, ClusterState {
if (this.disconnected || !this.accessible) {
await this.reconnect();
}
await this.refreshConnectionStatus()
await this.refreshConnectionStatus();
if (this.accessible) {
await this.refreshAllowedResources()
this.isAdmin = await this.isClusterAdmin()
this.ready = true
this.ensureKubectl()
await this.refreshAllowedResources();
this.isAdmin = await this.isClusterAdmin();
this.ready = true;
this.ensureKubectl();
}
this.activated = true
this.activated = true;
return this.pushState();
}

protected async ensureKubectl() {
this.kubeCtl = new Kubectl(this.version)
return this.kubeCtl.ensureKubectl() // download kubectl in background, so it's not blocking dashboard
this.kubeCtl = new Kubectl(this.version);
return this.kubeCtl.ensureKubectl(); // download kubectl in background, so it's not blocking dashboard
}

@action
@ -214,9 +214,9 @@ export class Cluster implements ClusterModel, ClusterState {
this.refreshAllowedResources(),
]);
if (opts.refreshMetadata) {
this.refreshMetadata()
this.refreshMetadata();
}
this.ready = true
this.ready = true;
}
this.pushState();
}
@ -224,9 +224,9 @@ export class Cluster implements ClusterModel, ClusterState {
@action
async refreshMetadata() {
logger.info(`[CLUSTER]: refreshMetadata`, this.getMeta());
const metadata = await detectorRegistry.detectForCluster(this)
const existingMetadata = this.metadata
this.metadata = Object.assign(existingMetadata, metadata)
const metadata = await detectorRegistry.detectForCluster(this);
const existingMetadata = this.metadata;
this.metadata = Object.assign(existingMetadata, metadata);
}

@action
@ -256,16 +256,16 @@ export class Cluster implements ClusterModel, ClusterState {
}

getProxyKubeconfigPath(): string {
return this.kubeconfigManager.getPath()
return this.kubeconfigManager.getPath();
}

protected async k8sRequest<T = any>(path: string, options: RequestPromiseOptions = {}): Promise<T> {
options.headers ??= {}
options.json ??= true
options.timeout ??= 30000
options.headers.Host = `${this.id}.${new URL(this.kubeProxyUrl).host}` // required in ClusterManager.getClusterForRequest()
options.headers ??= {};
options.json ??= true;
options.timeout ??= 30000;
options.headers.Host = `${this.id}.${new URL(this.kubeProxyUrl).host}`; // required in ClusterManager.getClusterForRequest()

return request(this.kubeProxyUrl + path, options)
return request(this.kubeProxyUrl + path, options);
}

getMetrics(prometheusPath: string, queryParams: IMetricsReqParams & { query: string }) {
@ -276,17 +276,17 @@ export class Cluster implements ClusterModel, ClusterState {
resolveWithFullResponse: false,
json: true,
qs: queryParams,
})
});
}

protected async getConnectionStatus(): Promise<ClusterStatus> {
try {
const versionDetector = new VersionDetector(this)
const versionData = await versionDetector.detect()
this.metadata.version = versionData.value
const versionDetector = new VersionDetector(this);
const versionData = await versionDetector.detect();
this.metadata.version = versionData.value;
return ClusterStatus.AccessGranted;
} catch (error) {
logger.error(`Failed to connect cluster "${this.contextName}": ${error}`)
logger.error(`Failed to connect cluster "${this.contextName}": ${error}`);
if (error.statusCode) {
if (error.statusCode >= 400 && error.statusCode < 500) {
this.failureReason = "Invalid credentials";
@ -310,17 +310,17 @@ export class Cluster implements ClusterModel, ClusterState {
}

async canI(resourceAttributes: V1ResourceAttributes): Promise<boolean> {
const authApi = this.getProxyKubeconfig().makeApiClient(AuthorizationV1Api)
const authApi = this.getProxyKubeconfig().makeApiClient(AuthorizationV1Api);
try {
const accessReview = await authApi.createSelfSubjectAccessReview({
apiVersion: "authorization.k8s.io/v1",
kind: "SelfSubjectAccessReview",
spec: { resourceAttributes }
})
return accessReview.body.status.allowed
});
return accessReview.body.status.allowed;
} catch (error) {
logger.error(`failed to request selfSubjectAccessReview: ${error}`)
return false
logger.error(`failed to request selfSubjectAccessReview: ${error}`);
return false;
}
}

@ -329,7 +329,7 @@ export class Cluster implements ClusterModel, ClusterState {
namespace: "kube-system",
resource: "*",
verb: "create",
})
});
}

protected async getEventCount(): Promise<number> {
@ -345,7 +345,7 @@ export class Cluster implements ClusterModel, ClusterState {
if (w.involvedObject.kind === 'Pod') {
try {
const { body: pod } = await client.readNamespacedPod(w.involvedObject.name, w.involvedObject.namespace);
logger.debug(`checking pod ${w.involvedObject.namespace}/${w.involvedObject.name}`)
logger.debug(`checking pod ${w.involvedObject.namespace}/${w.involvedObject.name}`);
if (podHasIssues(pod)) {
uniqEventSources.add(w.involvedObject.uid);
}
@ -361,7 +361,7 @@ export class Cluster implements ClusterModel, ClusterState {
.reduce((sum, conditions) => sum + conditions.length, 0);
return uniqEventSources.size + nodeNotificationCount;
} catch (error) {
logger.error("Failed to fetch event count: " + JSON.stringify(error))
logger.error("Failed to fetch event count: " + JSON.stringify(error));
return 0;
}
}
@ -379,7 +379,7 @@ export class Cluster implements ClusterModel, ClusterState {
};
return toJS(model, {
recurseEverything: true
})
});
}

// serializable cluster-state used for sync btw main <-> renderer
@ -399,17 +399,17 @@ export class Cluster implements ClusterModel, ClusterState {
};
return toJS(state, {
recurseEverything: true
})
});
}

@action
setState(state: ClusterState) {
Object.assign(this, state)
Object.assign(this, state);
}

pushState(state = this.getState()) {
logger.silly(`[CLUSTER]: push-state`, state);
broadcastMessage("cluster:state", this.id, state)
broadcastMessage("cluster:state", this.id, state);
}

// get cluster system meta, e.g. use in "logger"
@ -422,30 +422,30 @@ export class Cluster implements ClusterModel, ClusterState {
online: this.online,
accessible: this.accessible,
disconnected: this.disconnected,
}
};
}

protected async getAllowedNamespaces() {
if (this.accessibleNamespaces.length) {
return this.accessibleNamespaces
return this.accessibleNamespaces;
}

const api = this.getProxyKubeconfig().makeApiClient(CoreV1Api)
const api = this.getProxyKubeconfig().makeApiClient(CoreV1Api);
try {
const namespaceList = await api.listNamespace()
const namespaceList = await api.listNamespace();
const nsAccessStatuses = await Promise.all(
namespaceList.body.items.map(ns => this.canI({
namespace: ns.metadata.name,
resource: "pods",
verb: "list",
}))
)
);
return namespaceList.body.items
.filter((ns, i) => nsAccessStatuses[i])
.map(ns => ns.metadata.name)
.map(ns => ns.metadata.name);
} catch (error) {
const ctx = this.getProxyKubeconfig().getContextObject(this.contextName)
if (ctx.namespace) return [ctx.namespace]
const ctx = this.getProxyKubeconfig().getContextObject(this.contextName);
if (ctx.namespace) return [ctx.namespace];
return [];
}
}
@ -462,12 +462,12 @@ export class Cluster implements ClusterModel, ClusterState {
verb: "list",
namespace: this.allowedNamespaces[0]
}))
)
);
return apiResources
.filter((resource, i) => resourceAccessStatuses[i])
.map(apiResource => apiResource.resource)
.map(apiResource => apiResource.resource);
} catch (error) {
return []
return [];
}
}
}

@ -1,13 +1,13 @@
import type { PrometheusProvider, PrometheusService } from "./prometheus/provider-registry"
import type { PrometheusProvider, PrometheusService } from "./prometheus/provider-registry";
import type { ClusterPreferences } from "../common/cluster-store";
import type { Cluster } from "./cluster"
import type httpProxy from "http-proxy"
import type { Cluster } from "./cluster";
import type httpProxy from "http-proxy";
import url, { UrlWithStringQuery } from "url";
import { CoreV1Api } from "@kubernetes/client-node"
import { prometheusProviders } from "../common/prometheus-providers"
import logger from "./logger"
import { getFreePort } from "./port"
import { KubeAuthProxy } from "./kube-auth-proxy"
import { CoreV1Api } from "@kubernetes/client-node";
import { prometheusProviders } from "../common/prometheus-providers";
import logger from "./logger";
import { getFreePort } from "./port";
import { KubeAuthProxy } from "./kube-auth-proxy";

export class ContextHandler {
public proxyPort: number;
@ -26,64 +26,64 @@ export class ContextHandler {
this.prometheusProvider = preferences.prometheusProvider?.type;
this.prometheusPath = null;
if (preferences.prometheus) {
const { namespace, service, port } = preferences.prometheus
this.prometheusPath = `${namespace}/services/${service}:${port}`
const { namespace, service, port } = preferences.prometheus;
this.prometheusPath = `${namespace}/services/${service}:${port}`;
}
}

protected async resolvePrometheusPath(): Promise<string> {
const { service, namespace, port } = await this.getPrometheusService()
return `${namespace}/services/${service}:${port}`
const { service, namespace, port } = await this.getPrometheusService();
return `${namespace}/services/${service}:${port}`;
}

async getPrometheusProvider() {
if (!this.prometheusProvider) {
const service = await this.getPrometheusService()
logger.info(`using ${service.id} as prometheus provider`)
this.prometheusProvider = service.id
const service = await this.getPrometheusService();
logger.info(`using ${service.id} as prometheus provider`);
this.prometheusProvider = service.id;
}
return prometheusProviders.find(p => p.id === this.prometheusProvider)
return prometheusProviders.find(p => p.id === this.prometheusProvider);
}

async getPrometheusService(): Promise<PrometheusService> {
const providers = this.prometheusProvider ? prometheusProviders.filter(provider => provider.id == this.prometheusProvider) : prometheusProviders;
const prometheusPromises: Promise<PrometheusService>[] = providers.map(async (provider: PrometheusProvider): Promise<PrometheusService> => {
const apiClient = this.cluster.getProxyKubeconfig().makeApiClient(CoreV1Api)
return await provider.getPrometheusService(apiClient)
})
const resolvedPrometheusServices = await Promise.all(prometheusPromises)
const apiClient = this.cluster.getProxyKubeconfig().makeApiClient(CoreV1Api);
return await provider.getPrometheusService(apiClient);
});
const resolvedPrometheusServices = await Promise.all(prometheusPromises);
const service = resolvedPrometheusServices.filter(n => n)[0];
return service || {
id: "lens",
namespace: "lens-metrics",
service: "prometheus",
port: 80
}
};
}

async getPrometheusPath(): Promise<string> {
if (!this.prometheusPath) {
this.prometheusPath = await this.resolvePrometheusPath()
this.prometheusPath = await this.resolvePrometheusPath();
}
return this.prometheusPath;
}

async resolveAuthProxyUrl() {
const proxyPort = await this.ensurePort();
const path = this.clusterUrl.path !== "/" ? this.clusterUrl.path : ""
const path = this.clusterUrl.path !== "/" ? this.clusterUrl.path : "";
return `http://127.0.0.1:${proxyPort}${path}`;
}

async getApiTarget(isWatchRequest = false): Promise<httpProxy.ServerOptions> {
if (this.apiTarget && !isWatchRequest) {
return this.apiTarget
return this.apiTarget;
}
const timeout = isWatchRequest ? 4 * 60 * 60 * 1000 : 30000 // 4 hours for watch request, 30 seconds for the rest
const apiTarget = await this.newApiTarget(timeout)
const timeout = isWatchRequest ? 4 * 60 * 60 * 1000 : 30000; // 4 hours for watch request, 30 seconds for the rest
const apiTarget = await this.newApiTarget(timeout);
if (!isWatchRequest) {
this.apiTarget = apiTarget
this.apiTarget = apiTarget;
}
return apiTarget
return apiTarget;
}

protected async newApiTarget(timeout: number): Promise<httpProxy.ServerOptions> {
@ -95,36 +95,36 @@ export class ContextHandler {
headers: {
"Host": this.clusterUrl.hostname,
},
}
};
}

async ensurePort(): Promise<number> {
if (!this.proxyPort) {
this.proxyPort = await getFreePort();
}
return this.proxyPort
return this.proxyPort;
}

async ensureServer() {
if (!this.kubeAuthProxy) {
await this.ensurePort();
const proxyEnv = Object.assign({}, process.env)
const proxyEnv = Object.assign({}, process.env);
if (this.cluster.preferences.httpsProxy) {
proxyEnv.HTTPS_PROXY = this.cluster.preferences.httpsProxy
proxyEnv.HTTPS_PROXY = this.cluster.preferences.httpsProxy;
}
this.kubeAuthProxy = new KubeAuthProxy(this.cluster, this.proxyPort, proxyEnv)
await this.kubeAuthProxy.run()
this.kubeAuthProxy = new KubeAuthProxy(this.cluster, this.proxyPort, proxyEnv);
await this.kubeAuthProxy.run();
}
}

stopServer() {
if (this.kubeAuthProxy) {
this.kubeAuthProxy.exit()
this.kubeAuthProxy = null
this.kubeAuthProxy.exit();
this.kubeAuthProxy = null;
}
}

get proxyLastError(): string {
return this.kubeAuthProxy?.lastError || ""
return this.kubeAuthProxy?.lastError || "";
}
}

@ -6,13 +6,13 @@ import logger from "./logger";

export function exitApp() {
const windowManager = WindowManager.getInstance<WindowManager>()
const clusterManager = ClusterManager.getInstance<ClusterManager>()
appEventBus.emit({ name: "service", action: "close" })
const windowManager = WindowManager.getInstance<WindowManager>();
const clusterManager = ClusterManager.getInstance<ClusterManager>();
appEventBus.emit({ name: "service", action: "close" });
windowManager.hide();
clusterManager.stop();
logger.info('SERVICE:QUIT');
setTimeout(() => {
app.exit()
}, 1000)
app.exit();
}, 1000);
}
@ -1,9 +1,9 @@
import fs from "fs";
import * as yaml from "js-yaml";
import { HelmRepo, HelmRepoManager } from "./helm-repo-manager"
import { HelmRepo, HelmRepoManager } from "./helm-repo-manager";
import logger from "../logger";
import { promiseExec } from "../promise-exec"
import { helmCli } from "./helm-cli"
import { promiseExec } from "../promise-exec";
import { helmCli } from "./helm-cli";

type CachedYaml = {
entries: any; // todo: types
@ -14,61 +14,61 @@ export class HelmChartManager {
protected repo: HelmRepo

constructor(repo: HelmRepo){
this.cache = HelmRepoManager.cache
this.repo = repo
this.cache = HelmRepoManager.cache;
this.repo = repo;
}

public async chart(name: string) {
const charts = await this.charts()
return charts[name]
const charts = await this.charts();
return charts[name];
}

public async charts(): Promise<any> {
try {
const cachedYaml = await this.cachedYaml()
return cachedYaml["entries"]
const cachedYaml = await this.cachedYaml();
return cachedYaml["entries"];
} catch(error) {
logger.error(error)
return []
logger.error(error);
return [];
}
}

public async getReadme(name: string, version = "") {
const helm = await helmCli.binaryPath()
const helm = await helmCli.binaryPath();
if(version && version != "") {
const { stdout, stderr} = await promiseExec(`"${helm}" show readme ${this.repo.name}/${name} --version ${version}`).catch((error) => { throw(error.stderr)})
return stdout
const { stdout, stderr} = await promiseExec(`"${helm}" show readme ${this.repo.name}/${name} --version ${version}`).catch((error) => { throw(error.stderr);});
return stdout;
} else {
const { stdout, stderr} = await promiseExec(`"${helm}" show readme ${this.repo.name}/${name}`).catch((error) => { throw(error.stderr)})
return stdout
const { stdout, stderr} = await promiseExec(`"${helm}" show readme ${this.repo.name}/${name}`).catch((error) => { throw(error.stderr);});
return stdout;
}
}

public async getValues(name: string, version = "") {
const helm = await helmCli.binaryPath()
const helm = await helmCli.binaryPath();
if(version && version != "") {
const { stdout, stderr} = await promiseExec(`"${helm}" show values ${this.repo.name}/${name} --version ${version}`).catch((error) => { throw(error.stderr)})
const { stdout, stderr} = await promiseExec(`"${helm}" show values ${this.repo.name}/${name} --version ${version}`).catch((error) => { throw(error.stderr);});

return stdout
return stdout;
} else {
const { stdout, stderr} = await promiseExec(`"${helm}" show values ${this.repo.name}/${name}`).catch((error) => { throw(error.stderr)})
const { stdout, stderr} = await promiseExec(`"${helm}" show values ${this.repo.name}/${name}`).catch((error) => { throw(error.stderr);});

return stdout
return stdout;
}
}

protected async cachedYaml(): Promise<CachedYaml> {
if (!(this.repo.name in this.cache)) {
const cacheFile = await fs.promises.readFile(this.repo.cacheFilePath, 'utf-8')
const data = yaml.safeLoad(cacheFile)
const cacheFile = await fs.promises.readFile(this.repo.cacheFilePath, 'utf-8');
const data = yaml.safeLoad(cacheFile);
for(const key in data["entries"]) {
data["entries"][key].forEach((version: any) => {
version['repo'] = this.repo.name
version['created'] = Date.parse(version.created).toString()
})
version['repo'] = this.repo.name;
version['created'] = Date.parse(version.created).toString();
});
}
this.cache[this.repo.name] = Buffer.from(JSON.stringify(data))
this.cache[this.repo.name] = Buffer.from(JSON.stringify(data));
}
return JSON.parse(this.cache[this.repo.name].toString())
return JSON.parse(this.cache[this.repo.name].toString());
}
}

@ -1,6 +1,6 @@
|
||||
import packageInfo from "../../../package.json"
|
||||
import path from "path"
|
||||
import { LensBinary, LensBinaryOpts } from "../lens-binary"
|
||||
import packageInfo from "../../../package.json";
|
||||
import path from "path";
|
||||
import { LensBinary, LensBinaryOpts } from "../lens-binary";
|
||||
import { isProduction } from "../../common/vars";
|
||||
|
||||
export class HelmCli extends LensBinary {
|
||||
@ -11,24 +11,24 @@ export class HelmCli extends LensBinary {
|
||||
baseDir: baseDir,
|
||||
originalBinaryName: "helm",
|
||||
newBinaryName: "helm3"
|
||||
}
|
||||
super(opts)
|
||||
};
|
||||
super(opts);
|
||||
}
|
||||
|
||||
protected getTarName(): string | null {
|
||||
return `${this.binaryName}-v${this.binaryVersion}-${this.platformName}-${this.arch}.tar.gz`
|
||||
return `${this.binaryName}-v${this.binaryVersion}-${this.platformName}-${this.arch}.tar.gz`;
|
||||
}
|
||||
|
||||
protected getUrl() {
|
||||
return `https://get.helm.sh/helm-v${this.binaryVersion}-${this.platformName}-${this.arch}.tar.gz`
|
||||
return `https://get.helm.sh/helm-v${this.binaryVersion}-${this.platformName}-${this.arch}.tar.gz`;
|
||||
}
|
||||
|
||||
protected getBinaryPath() {
|
||||
return path.join(this.dirname, this.binaryName)
|
||||
return path.join(this.dirname, this.binaryName);
|
||||
}
|
||||
|
||||
protected getOriginalBinaryPath() {
|
||||
return path.join(this.dirname, this.platformName + "-" + this.arch, this.originalBinaryName)
|
||||
return path.join(this.dirname, this.platformName + "-" + this.arch, this.originalBinaryName);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -1,7 +1,7 @@
import * as tempy from "tempy";
import fs from "fs";
import * as yaml from "js-yaml";
import { promiseExec} from "../promise-exec"
import { promiseExec} from "../promise-exec";
import { helmCli } from "./helm-cli";
import { Cluster } from "../cluster";
import { toCamelCase } from "../../common/utils/camelCase";
@ -9,103 +9,103 @@ import { toCamelCase } from "../../common/utils/camelCase";
export class HelmReleaseManager {

public async listReleases(pathToKubeconfig: string, namespace?: string) {
const helm = await helmCli.binaryPath()
const namespaceFlag = namespace ? `-n ${namespace}` : "--all-namespaces"
const { stdout } = await promiseExec(`"${helm}" ls --output json ${namespaceFlag} --kubeconfig ${pathToKubeconfig}`).catch((error) => { throw(error.stderr)})
const helm = await helmCli.binaryPath();
const namespaceFlag = namespace ? `-n ${namespace}` : "--all-namespaces";
const { stdout } = await promiseExec(`"${helm}" ls --output json ${namespaceFlag} --kubeconfig ${pathToKubeconfig}`).catch((error) => { throw(error.stderr);});

const output = JSON.parse(stdout)
const output = JSON.parse(stdout);
if (output.length == 0) {
return output
return output;
}
output.forEach((release: any, index: number) => {
output[index] = toCamelCase(release)
output[index] = toCamelCase(release);
});
return output
return output;
}


public async installChart(chart: string, values: any, name: string, namespace: string, version: string, pathToKubeconfig: string){
const helm = await helmCli.binaryPath()
const fileName = tempy.file({name: "values.yaml"})
await fs.promises.writeFile(fileName, yaml.safeDump(values))
const helm = await helmCli.binaryPath();
const fileName = tempy.file({name: "values.yaml"});
await fs.promises.writeFile(fileName, yaml.safeDump(values));
try {
let generateName = ""
let generateName = "";
if (!name) {
generateName = "--generate-name"
name = ""
generateName = "--generate-name";
name = "";
}
const { stdout, stderr } = await promiseExec(`"${helm}" install ${name} ${chart} --version ${version} -f ${fileName} --namespace ${namespace} --kubeconfig ${pathToKubeconfig} ${generateName}`).catch((error) => { throw(error.stderr)})
const releaseName = stdout.split("\n")[0].split(' ')[1].trim()
const { stdout, stderr } = await promiseExec(`"${helm}" install ${name} ${chart} --version ${version} -f ${fileName} --namespace ${namespace} --kubeconfig ${pathToKubeconfig} ${generateName}`).catch((error) => { throw(error.stderr);});
const releaseName = stdout.split("\n")[0].split(' ')[1].trim();
return {
log: stdout,
release: {
name: releaseName,
namespace: namespace
}
}
};
} finally {
await fs.promises.unlink(fileName)
await fs.promises.unlink(fileName);
}
}

public async upgradeRelease(name: string, chart: string, values: any, namespace: string, version: string, cluster: Cluster){
const helm = await helmCli.binaryPath()
const fileName = tempy.file({name: "values.yaml"})
await fs.promises.writeFile(fileName, yaml.safeDump(values))
const helm = await helmCli.binaryPath();
const fileName = tempy.file({name: "values.yaml"});
await fs.promises.writeFile(fileName, yaml.safeDump(values));

try {
const { stdout, stderr } = await promiseExec(`"${helm}" upgrade ${name} ${chart} --version ${version} -f ${fileName} --namespace ${namespace} --kubeconfig ${cluster.getProxyKubeconfigPath()}`).catch((error) => { throw(error.stderr)})
const { stdout, stderr } = await promiseExec(`"${helm}" upgrade ${name} ${chart} --version ${version} -f ${fileName} --namespace ${namespace} --kubeconfig ${cluster.getProxyKubeconfigPath()}`).catch((error) => { throw(error.stderr);});
return {
log: stdout,
release: this.getRelease(name, namespace, cluster)
}
};
} finally {
await fs.promises.unlink(fileName)
await fs.promises.unlink(fileName);
}
}

public async getRelease(name: string, namespace: string, cluster: Cluster) {
const helm = await helmCli.binaryPath()
const {stdout, stderr} = await promiseExec(`"${helm}" status ${name} --output json --namespace ${namespace} --kubeconfig ${cluster.getProxyKubeconfigPath()}`).catch((error) => { throw(error.stderr)})
const release = JSON.parse(stdout)
release.resources = await this.getResources(name, namespace, cluster)
return release
const helm = await helmCli.binaryPath();
const {stdout, stderr} = await promiseExec(`"${helm}" status ${name} --output json --namespace ${namespace} --kubeconfig ${cluster.getProxyKubeconfigPath()}`).catch((error) => { throw(error.stderr);});
const release = JSON.parse(stdout);
release.resources = await this.getResources(name, namespace, cluster);
return release;
}

public async deleteRelease(name: string, namespace: string, pathToKubeconfig: string) {
const helm = await helmCli.binaryPath()
const { stdout, stderr } = await promiseExec(`"${helm}" delete ${name} --namespace ${namespace} --kubeconfig ${pathToKubeconfig}`).catch((error) => { throw(error.stderr)})
const helm = await helmCli.binaryPath();
const { stdout, stderr } = await promiseExec(`"${helm}" delete ${name} --namespace ${namespace} --kubeconfig ${pathToKubeconfig}`).catch((error) => { throw(error.stderr);});

return stdout
return stdout;
}

public async getValues(name: string, namespace: string, pathToKubeconfig: string) {
const helm = await helmCli.binaryPath()
const { stdout, stderr } = await promiseExec(`"${helm}" get values ${name} --all --output yaml --namespace ${namespace} --kubeconfig ${pathToKubeconfig}`).catch((error) => { throw(error.stderr)})
return stdout
const helm = await helmCli.binaryPath();
const { stdout, stderr } = await promiseExec(`"${helm}" get values ${name} --all --output yaml --namespace ${namespace} --kubeconfig ${pathToKubeconfig}`).catch((error) => { throw(error.stderr);});
return stdout;
}

public async getHistory(name: string, namespace: string, pathToKubeconfig: string) {
const helm = await helmCli.binaryPath()
const {stdout, stderr} = await promiseExec(`"${helm}" history ${name} --output json --namespace ${namespace} --kubeconfig ${pathToKubeconfig}`).catch((error) => { throw(error.stderr)})
return JSON.parse(stdout)
const helm = await helmCli.binaryPath();
const {stdout, stderr} = await promiseExec(`"${helm}" history ${name} --output json --namespace ${namespace} --kubeconfig ${pathToKubeconfig}`).catch((error) => { throw(error.stderr);});
return JSON.parse(stdout);
}

public async rollback(name: string, namespace: string, revision: number, pathToKubeconfig: string) {
const helm = await helmCli.binaryPath()
const {stdout, stderr} = await promiseExec(`"${helm}" rollback ${name} ${revision} --namespace ${namespace} --kubeconfig ${pathToKubeconfig}`).catch((error) => { throw(error.stderr)})
return stdout
const helm = await helmCli.binaryPath();
const {stdout, stderr} = await promiseExec(`"${helm}" rollback ${name} ${revision} --namespace ${namespace} --kubeconfig ${pathToKubeconfig}`).catch((error) => { throw(error.stderr);});
return stdout;
}

protected async getResources(name: string, namespace: string, cluster: Cluster) {
const helm = await helmCli.binaryPath()
const kubectl = await cluster.kubeCtl.getPath()
const pathToKubeconfig = cluster.getProxyKubeconfigPath()
const helm = await helmCli.binaryPath();
const kubectl = await cluster.kubeCtl.getPath();
const pathToKubeconfig = cluster.getProxyKubeconfigPath();
const { stdout } = await promiseExec(`"${helm}" get manifest ${name} --namespace ${namespace} --kubeconfig ${pathToKubeconfig} | "${kubectl}" get -n ${namespace} --kubeconfig ${pathToKubeconfig} -f - -o=json`).catch((error) => {
return { stdout: JSON.stringify({items: []})}
})
return stdout
return { stdout: JSON.stringify({items: []})};
});
return stdout;
}
}

export const releaseManager = new HelmReleaseManager()
export const releaseManager = new HelmReleaseManager();

@ -46,34 +46,34 @@ export class HelmRepoManager extends Singleton {
}

async init() {
helmCli.setLogger(logger)
helmCli.setLogger(logger);
await helmCli.ensureBinary();
if (!this.initialized) {
this.helmEnv = await this.parseHelmEnv()
await this.update()
this.initialized = true
this.helmEnv = await this.parseHelmEnv();
await this.update();
this.initialized = true;
}
}

protected async parseHelmEnv() {
const helm = await helmCli.binaryPath()
const helm = await helmCli.binaryPath();
const { stdout } = await promiseExec(`"${helm}" env`).catch((error) => {
throw(error.stderr)
})
const lines = stdout.split(/\r?\n/) // split by new line feed
const env: HelmEnv = {}
throw(error.stderr);
});
const lines = stdout.split(/\r?\n/); // split by new line feed
const env: HelmEnv = {};
lines.forEach((line: string) => {
const [key, value] = line.split("=")
const [key, value] = line.split("=");
if (key && value) {
env[key] = value.replace(/"/g, "") // strip quotas
env[key] = value.replace(/"/g, ""); // strip quotas
}
})
return env
});
return env;
}

public async repositories(): Promise<HelmRepo[]> {
if (!this.initialized) {
await this.init()
await this.init();
}
try {
const repoConfigFile = this.helmEnv.HELM_REPOSITORY_CONFIG;
@ -91,41 +91,41 @@ export class HelmRepoManager extends Singleton {
cacheFilePath: `${this.helmEnv.HELM_REPOSITORY_CACHE}/${repo.name}-index.yaml`
}));
} catch (error) {
logger.error(`[HELM]: repositories listing error "${error}"`)
return []
logger.error(`[HELM]: repositories listing error "${error}"`);
return [];
}
}

public async repository(name: string) {
const repositories = await this.repositories()
const repositories = await this.repositories();
return repositories.find(repo => repo.name == name);
}

public async update() {
const helm = await helmCli.binaryPath()
const helm = await helmCli.binaryPath();
const { stdout } = await promiseExec(`"${helm}" repo update`).catch((error) => {
return { stdout: error.stdout }
})
return stdout
return { stdout: error.stdout };
});
return stdout;
}

public async addRepo({ name, url }: HelmRepo) {
logger.info(`[HELM]: adding repo "${name}" from ${url}`);
const helm = await helmCli.binaryPath()
const helm = await helmCli.binaryPath();
const { stdout } = await promiseExec(`"${helm}" repo add ${name} ${url}`).catch((error) => {
throw(error.stderr)
})
return stdout
throw(error.stderr);
});
return stdout;
}

public async removeRepo({ name, url }: HelmRepo): Promise<string> {
logger.info(`[HELM]: removing repo "${name}" from ${url}`);
const helm = await helmCli.binaryPath()
const helm = await helmCli.binaryPath();
const { stdout, stderr } = await promiseExec(`"${helm}" repo remove ${name}`).catch((error) => {
throw(error.stderr)
})
return stdout
throw(error.stderr);
});
return stdout;
}
}

export const repoManager = HelmRepoManager.getInstance<HelmRepoManager>()
export const repoManager = HelmRepoManager.getInstance<HelmRepoManager>();

@ -6,93 +6,93 @@ import { releaseManager } from "./helm-release-manager";

class HelmService {
public async installChart(cluster: Cluster, data: { chart: string; values: {}; name: string; namespace: string; version: string }) {
return await releaseManager.installChart(data.chart, data.values, data.name, data.namespace, data.version, cluster.getProxyKubeconfigPath())
return await releaseManager.installChart(data.chart, data.values, data.name, data.namespace, data.version, cluster.getProxyKubeconfigPath());
}

public async listCharts() {
const charts: any = {}
await repoManager.init()
const repositories = await repoManager.repositories()
const charts: any = {};
await repoManager.init();
const repositories = await repoManager.repositories();
for (const repo of repositories) {
charts[repo.name] = {}
const manager = new HelmChartManager(repo)
let entries = await manager.charts()
entries = this.excludeDeprecated(entries)
charts[repo.name] = {};
const manager = new HelmChartManager(repo);
let entries = await manager.charts();
entries = this.excludeDeprecated(entries);
for (const key in entries) {
entries[key] = entries[key][0]
entries[key] = entries[key][0];
}
charts[repo.name] = entries
charts[repo.name] = entries;
}
return charts
return charts;
}

public async getChart(repoName: string, chartName: string, version = "") {
const result = {
readme: "",
versions: {}
}
const repo = await repoManager.repository(repoName)
const chartManager = new HelmChartManager(repo)
const chart = await chartManager.chart(chartName)
result.readme = await chartManager.getReadme(chartName, version)
result.versions = chart
return result
};
const repo = await repoManager.repository(repoName);
const chartManager = new HelmChartManager(repo);
const chart = await chartManager.chart(chartName);
result.readme = await chartManager.getReadme(chartName, version);
result.versions = chart;
return result;
}

public async getChartValues(repoName: string, chartName: string, version = "") {
const repo = await repoManager.repository(repoName)
const chartManager = new HelmChartManager(repo)
return chartManager.getValues(chartName, version)
const repo = await repoManager.repository(repoName);
const chartManager = new HelmChartManager(repo);
return chartManager.getValues(chartName, version);
}

public async listReleases(cluster: Cluster, namespace: string = null) {
await repoManager.init()
return await releaseManager.listReleases(cluster.getProxyKubeconfigPath(), namespace)
await repoManager.init();
return await releaseManager.listReleases(cluster.getProxyKubeconfigPath(), namespace);
}

public async getRelease(cluster: Cluster, releaseName: string, namespace: string) {
logger.debug("Fetch release")
return await releaseManager.getRelease(releaseName, namespace, cluster)
logger.debug("Fetch release");
return await releaseManager.getRelease(releaseName, namespace, cluster);
}

public async getReleaseValues(cluster: Cluster, releaseName: string, namespace: string) {
logger.debug("Fetch release values")
return await releaseManager.getValues(releaseName, namespace, cluster.getProxyKubeconfigPath())
logger.debug("Fetch release values");
return await releaseManager.getValues(releaseName, namespace, cluster.getProxyKubeconfigPath());
}

public async getReleaseHistory(cluster: Cluster, releaseName: string, namespace: string) {
logger.debug("Fetch release history")
return await releaseManager.getHistory(releaseName, namespace, cluster.getProxyKubeconfigPath())
logger.debug("Fetch release history");
return await releaseManager.getHistory(releaseName, namespace, cluster.getProxyKubeconfigPath());
}

public async deleteRelease(cluster: Cluster, releaseName: string, namespace: string) {
logger.debug("Delete release")
return await releaseManager.deleteRelease(releaseName, namespace, cluster.getProxyKubeconfigPath())
logger.debug("Delete release");
return await releaseManager.deleteRelease(releaseName, namespace, cluster.getProxyKubeconfigPath());
}

public async updateRelease(cluster: Cluster, releaseName: string, namespace: string, data: { chart: string; values: {}; version: string }) {
logger.debug("Upgrade release")
return await releaseManager.upgradeRelease(releaseName, data.chart, data.values, namespace, data.version, cluster)
logger.debug("Upgrade release");
return await releaseManager.upgradeRelease(releaseName, data.chart, data.values, namespace, data.version, cluster);
}

public async rollback(cluster: Cluster, releaseName: string, namespace: string, revision: number) {
logger.debug("Rollback release")
const output = await releaseManager.rollback(releaseName, namespace, revision, cluster.getProxyKubeconfigPath())
return { message: output }
logger.debug("Rollback release");
const output = await releaseManager.rollback(releaseName, namespace, revision, cluster.getProxyKubeconfigPath());
return { message: output };
}

protected excludeDeprecated(entries: any) {
for (const key in entries) {
entries[key] = entries[key].filter((entry: any) => {
if (Array.isArray(entry)) {
return entry[0]['deprecated'] != true
return entry[0]['deprecated'] != true;
}
return entry["deprecated"] != true
})
return entry["deprecated"] != true;
});
}
return entries
return entries;
}

}

export const helmService = new HelmService()
export const helmService = new HelmService();

@ -1,25 +1,25 @@
// Main process

import "../common/system-ca"
import "../common/prometheus-providers"
import * as Mobx from "mobx"
import "../common/system-ca";
import "../common/prometheus-providers";
import * as Mobx from "mobx";
import * as LensExtensions from "../extensions/core-api";
import { app, dialog } from "electron"
import { app, dialog } from "electron";
import { appName } from "../common/vars";
import path from "path"
import { LensProxy } from "./lens-proxy"
import path from "path";
import { LensProxy } from "./lens-proxy";
import { WindowManager } from "./window-manager";
import { ClusterManager } from "./cluster-manager";
import { AppUpdater } from "./app-updater"
import { shellSync } from "./shell-sync"
import { getFreePort } from "./port"
import { mangleProxyEnv } from "./proxy-env"
import { AppUpdater } from "./app-updater";
import { shellSync } from "./shell-sync";
import { getFreePort } from "./port";
import { mangleProxyEnv } from "./proxy-env";
import { registerFileProtocol } from "../common/register-protocol";
import logger from "./logger"
import { clusterStore } from "../common/cluster-store"
import logger from "./logger";
import { clusterStore } from "../common/cluster-store";
import { userStore } from "../common/user-store";
import { workspaceStore } from "../common/workspace-store";
import { appEventBus } from "../common/event-bus"
import { appEventBus } from "../common/event-bus";
import { extensionLoader } from "../extensions/extension-loader";
import { extensionManager } from "../extensions/extension-manager";
import { extensionsStore } from "../extensions/extensions-store";
@ -35,16 +35,16 @@ if (!process.env.CICD) {
app.setPath("userData", workingDir);
}

mangleProxyEnv()
mangleProxyEnv();
if (app.commandLine.getSwitchValue("proxy-server") !== "") {
process.env.HTTPS_PROXY = app.commandLine.getSwitchValue("proxy-server")
process.env.HTTPS_PROXY = app.commandLine.getSwitchValue("proxy-server");
}

app.on("ready", async () => {
logger.info(`🚀 Starting Lens from "${workingDir}"`)
logger.info(`🚀 Starting Lens from "${workingDir}"`);
await shellSync();

const updater = new AppUpdater()
const updater = new AppUpdater();
updater.start();

registerFileProtocol("static", __static);
@ -59,10 +59,10 @@ app.on("ready", async () => {

// find free port
try {
proxyPort = await getFreePort()
proxyPort = await getFreePort();
} catch (error) {
logger.error(error)
dialog.showErrorBox("Lens Error", "Could not find a free port for the cluster proxy")
logger.error(error);
dialog.showErrorBox("Lens Error", "Could not find a free port for the cluster proxy");
app.exit();
}

@ -73,22 +73,22 @@ app.on("ready", async () => {
try {
proxyServer = LensProxy.create(proxyPort, clusterManager);
} catch (error) {
logger.error(`Could not start proxy (127.0.0:${proxyPort}): ${error.message}`)
dialog.showErrorBox("Lens Error", `Could not start proxy (127.0.0:${proxyPort}): ${error.message || "unknown error"}`)
logger.error(`Could not start proxy (127.0.0:${proxyPort}): ${error.message}`);
dialog.showErrorBox("Lens Error", `Could not start proxy (127.0.0:${proxyPort}): ${error.message || "unknown error"}`);
app.exit();
}

extensionLoader.init()
extensionLoader.init();
windowManager = WindowManager.getInstance<WindowManager>(proxyPort);
extensionLoader.initExtensions(await extensionManager.load()); // call after windowManager to see splash earlier

setTimeout(() => {
appEventBus.emit({ name: "service", action: "start" })
}, 1000)
appEventBus.emit({ name: "service", action: "start" });
}, 1000);
});

app.on("activate", (event, hasVisibleWindows) => {
logger.info('APP:ACTIVATE', { hasVisibleWindows })
logger.info('APP:ACTIVATE', { hasVisibleWindows });
if (!hasVisibleWindows) {
windowManager.initMainWindow();
}
@ -97,11 +97,11 @@ app.on("activate", (event, hasVisibleWindows) => {
// Quit app on Cmd+Q (MacOS)
app.on("will-quit", (event) => {
logger.info('APP:QUIT');
appEventBus.emit({name: "app", action: "close"})
appEventBus.emit({name: "app", action: "close"});
event.preventDefault(); // prevent app's default shutdown (e.g. required for telemetry, etc.)
clusterManager?.stop(); // close cluster connections
return; // skip exit to make tray work, to quit go to app's global menu or tray's menu
})
});

// Extensions-api runtime exports
export const LensExtensionsApi = {
@ -111,4 +111,4 @@ export const LensExtensionsApi = {
export {
Mobx,
LensExtensionsApi as LensExtensions,
}
};

@ -1,10 +1,10 @@
import { ChildProcess, spawn } from "child_process"
import { ChildProcess, spawn } from "child_process";
import { waitUntilUsed } from "tcp-port-used";
import { broadcastMessage } from "../common/ipc";
import type { Cluster } from "./cluster"
import { Kubectl } from "./kubectl"
import logger from "./logger"
import * as url from "url"
import type { Cluster } from "./cluster";
import { Kubectl } from "./kubectl";
import logger from "./logger";
import * as url from "url";

export interface KubeAuthProxyLog {
data: string;
@ -21,10 +21,10 @@ export class KubeAuthProxy {
protected kubectl: Kubectl

constructor(cluster: Cluster, port: number, env: NodeJS.ProcessEnv) {
this.env = env
this.port = port
this.cluster = cluster
this.kubectl = Kubectl.bundled()
this.env = env;
this.port = port;
this.cluster = cluster;
this.kubectl = Kubectl.bundled();
}

get acceptHosts() {
@ -36,7 +36,7 @@ export class KubeAuthProxy {
return;
}

const proxyBin = await this.kubectl.getPath()
const proxyBin = await this.kubectl.getPath();
const args = [
"proxy",
"-p", `${this.port}`,
@ -44,63 +44,63 @@ export class KubeAuthProxy {
"--context", `${this.cluster.contextName}`,
"--accept-hosts", this.acceptHosts,
"--reject-paths", "^[^/]"
]
];
if (process.env.DEBUG_PROXY === "true") {
args.push("-v", "9")
args.push("-v", "9");
}
logger.debug(`spawning kubectl proxy with args: ${args}`)
this.proxyProcess = spawn(proxyBin, args, { env: this.env, })
logger.debug(`spawning kubectl proxy with args: ${args}`);
this.proxyProcess = spawn(proxyBin, args, { env: this.env, });
this.proxyProcess.on("error", (error) => {
this.sendIpcLogMessage({ data: error.message, error: true })
this.exit()
})
this.sendIpcLogMessage({ data: error.message, error: true });
this.exit();
});

this.proxyProcess.on("exit", (code) => {
this.sendIpcLogMessage({ data: `proxy exited with code: ${code}`, error: code > 0 })
this.sendIpcLogMessage({ data: `proxy exited with code: ${code}`, error: code > 0 });
this.exit();
})
});

this.proxyProcess.stdout.on('data', (data) => {
let logItem = data.toString()
let logItem = data.toString();
if (logItem.startsWith("Starting to serve on")) {
logItem = "Authentication proxy started\n"
logItem = "Authentication proxy started\n";
}
this.sendIpcLogMessage({ data: logItem })
})
this.sendIpcLogMessage({ data: logItem });
});

this.proxyProcess.stderr.on('data', (data) => {
this.lastError = this.parseError(data.toString())
this.sendIpcLogMessage({ data: data.toString(), error: true })
})
this.lastError = this.parseError(data.toString());
this.sendIpcLogMessage({ data: data.toString(), error: true });
});

return waitUntilUsed(this.port, 500, 10000)
return waitUntilUsed(this.port, 500, 10000);
}

protected parseError(data: string) {
const error = data.split("http: proxy error:").slice(1).join("").trim()
let errorMsg = error
const jsonError = error.split("Response: ")[1]
const error = data.split("http: proxy error:").slice(1).join("").trim();
let errorMsg = error;
const jsonError = error.split("Response: ")[1];
if (jsonError) {
try {
const parsedError = JSON.parse(jsonError)
errorMsg = parsedError.error_description || parsedError.error || jsonError
const parsedError = JSON.parse(jsonError);
errorMsg = parsedError.error_description || parsedError.error || jsonError;
} catch (_) {
errorMsg = jsonError.trim()
errorMsg = jsonError.trim();
}
}
return errorMsg
return errorMsg;
}

protected async sendIpcLogMessage(res: KubeAuthProxyLog) {
const channel = `kube-auth:${this.cluster.id}`
const channel = `kube-auth:${this.cluster.id}`;
logger.info(`[KUBE-AUTH]: out-channel "${channel}"`, { ...res, meta: this.cluster.getMeta() });
broadcastMessage(channel, res)
broadcastMessage(channel, res);
}

public exit() {
if (!this.proxyProcess) return;
logger.debug("[KUBE-AUTH]: stopping local proxy", this.cluster.getMeta())
this.proxyProcess.kill()
logger.debug("[KUBE-AUTH]: stopping local proxy", this.cluster.getMeta());
this.proxyProcess.kill();
this.proxyProcess.removeAllListeners();
this.proxyProcess.stderr.removeAllListeners();
this.proxyProcess.stdout.removeAllListeners();

@ -1,11 +1,11 @@
import type { KubeConfig } from "@kubernetes/client-node";
import type { Cluster } from "./cluster"
import type { Cluster } from "./cluster";
import type { ContextHandler } from "./context-handler";
import { app } from "electron"
import path from "path"
import fs from "fs-extra"
import { dumpConfigYaml, loadConfig } from "../common/kube-helpers"
import logger from "./logger"
import { app } from "electron";
import path from "path";
import fs from "fs-extra";
import { dumpConfigYaml, loadConfig } from "../common/kube-helpers";
import logger from "./logger";

export class KubeconfigManager {
protected configDir = app.getPath("temp")
@ -14,9 +14,9 @@ export class KubeconfigManager {
private constructor(protected cluster: Cluster, protected contextHandler: ContextHandler, protected port: number) { }

static async create(cluster: Cluster, contextHandler: ContextHandler, port: number) {
const kcm = new KubeconfigManager(cluster, contextHandler, port)
await kcm.init()
return kcm
const kcm = new KubeconfigManager(cluster, contextHandler, port);
await kcm.init();
return kcm;
}

protected async init() {
@ -24,7 +24,7 @@ export class KubeconfigManager {
await this.contextHandler.ensurePort();
await this.createProxyKubeconfig();
} catch (err) {
logger.error(`Failed to created temp config for auth-proxy`, { err })
logger.error(`Failed to created temp config for auth-proxy`, { err });
}
}

@ -33,7 +33,7 @@ export class KubeconfigManager {
}

protected resolveProxyUrl() {
return `http://127.0.0.1:${this.port}/${this.cluster.id}`
return `http://127.0.0.1:${this.port}/${this.cluster.id}`;
}

/**
@ -78,11 +78,11 @@ export class KubeconfigManager {

async unlink() {
if (!this.tempFile) {
return
return;
}

logger.info('Deleting temporary kubeconfig: ' + this.tempFile)
await fs.unlink(this.tempFile)
this.tempFile = undefined
logger.info('Deleting temporary kubeconfig: ' + this.tempFile);
await fs.unlink(this.tempFile);
this.tempFile = undefined;
}
}

@ -1,17 +1,17 @@
|
||||
import { app, remote } from "electron"
|
||||
import path from "path"
|
||||
import fs from "fs"
|
||||
import { promiseExec } from "./promise-exec"
|
||||
import logger from "./logger"
|
||||
import { ensureDir, pathExists } from "fs-extra"
|
||||
import * as lockFile from "proper-lockfile"
|
||||
import { helmCli } from "./helm/helm-cli"
|
||||
import { userStore } from "../common/user-store"
|
||||
import { app, remote } from "electron";
|
||||
import path from "path";
|
||||
import fs from "fs";
|
||||
import { promiseExec } from "./promise-exec";
|
||||
import logger from "./logger";
|
||||
import { ensureDir, pathExists } from "fs-extra";
|
||||
import * as lockFile from "proper-lockfile";
|
||||
import { helmCli } from "./helm/helm-cli";
|
||||
import { userStore } from "../common/user-store";
|
||||
import { customRequest } from "../common/request";
|
||||
import { getBundledKubectlVersion } from "../common/utils/app-version"
|
||||
import { getBundledKubectlVersion } from "../common/utils/app-version";
|
||||
import { isDevelopment, isWindows, isTestEnv } from "../common/vars";
|
||||
|
||||
const bundledVersion = getBundledKubectlVersion()
|
||||
const bundledVersion = getBundledKubectlVersion();
|
||||
const kubectlMap: Map<string, string> = new Map([
|
||||
["1.7", "1.8.15"],
|
||||
["1.8", "1.9.10"],
|
||||
@ -26,31 +26,31 @@ const kubectlMap: Map<string, string> = new Map([
|
||||
["1.17", bundledVersion],
|
||||
["1.18", "1.18.8"],
|
||||
["1.19", "1.19.0"]
|
||||
])
|
||||
]);
|
||||
|
||||
const packageMirrors: Map<string, string> = new Map([
|
||||
["default", "https://storage.googleapis.com/kubernetes-release/release"],
|
||||
["china", "https://mirror.azure.cn/kubernetes/kubectl"]
|
||||
])
|
||||
]);
|
||||
|
||||
let bundledPath: string
|
||||
const initScriptVersionString = "# lens-initscript v3\n"
|
||||
let bundledPath: string;
|
||||
const initScriptVersionString = "# lens-initscript v3\n";
|
||||
|
||||
export function bundledKubectlPath(): string {
|
||||
if (bundledPath) { return bundledPath }
|
||||
if (bundledPath) { return bundledPath; }
|
||||
|
||||
if (isDevelopment || isTestEnv) {
|
||||
const platformName = isWindows ? "windows" : process.platform
|
||||
bundledPath = path.join(process.cwd(), "binaries", "client", platformName, process.arch, "kubectl")
|
||||
const platformName = isWindows ? "windows" : process.platform;
|
||||
bundledPath = path.join(process.cwd(), "binaries", "client", platformName, process.arch, "kubectl");
|
||||
} else {
|
||||
bundledPath = path.join(process.resourcesPath, process.arch, "kubectl")
|
||||
bundledPath = path.join(process.resourcesPath, process.arch, "kubectl");
|
||||
}
|
||||
|
||||
if (isWindows) {
|
||||
bundledPath = `${bundledPath}.exe`
|
||||
bundledPath = `${bundledPath}.exe`;
|
||||
}
|
||||
|
||||
return bundledPath
|
||||
return bundledPath;
|
||||
}
|
||||
|
||||
export class Kubectl {
|
||||
@ -61,7 +61,7 @@ export class Kubectl {
|
||||
protected dirname: string
|
||||
|
||||
static get kubectlDir() {
|
||||
return path.join((app || remote.app).getPath("userData"), "binaries", "kubectl")
|
||||
return path.join((app || remote.app).getPath("userData"), "binaries", "kubectl");
|
||||
}
|
||||
|
||||
public static readonly bundledKubectlVersion: string = bundledVersion
|
||||
@ -70,270 +70,270 @@ export class Kubectl {
|
||||
|
||||
// Returns the single bundled Kubectl instance
|
||||
public static bundled() {
|
||||
if (!Kubectl.bundledInstance) Kubectl.bundledInstance = new Kubectl(Kubectl.bundledKubectlVersion)
|
||||
return Kubectl.bundledInstance
|
||||
if (!Kubectl.bundledInstance) Kubectl.bundledInstance = new Kubectl(Kubectl.bundledKubectlVersion);
|
||||
return Kubectl.bundledInstance;
|
||||
}
|
||||
|
||||
constructor(clusterVersion: string) {
|
||||
const versionParts = /^v?(\d+\.\d+)(.*)/.exec(clusterVersion)
|
||||
const minorVersion = versionParts[1]
|
||||
const versionParts = /^v?(\d+\.\d+)(.*)/.exec(clusterVersion);
|
||||
const minorVersion = versionParts[1];
|
||||
/* minorVersion is the first two digits of kube server version
|
||||
if the version map includes that, use that version, if not, fallback to the exact x.y.z of kube version */
|
||||
if (kubectlMap.has(minorVersion)) {
|
||||
this.kubectlVersion = kubectlMap.get(minorVersion)
|
||||
logger.debug("Set kubectl version " + this.kubectlVersion + " for cluster version " + clusterVersion + " using version map")
|
||||
this.kubectlVersion = kubectlMap.get(minorVersion);
|
||||
logger.debug("Set kubectl version " + this.kubectlVersion + " for cluster version " + clusterVersion + " using version map");
|
||||
} else {
|
||||
this.kubectlVersion = versionParts[1] + versionParts[2]
|
||||
logger.debug("Set kubectl version " + this.kubectlVersion + " for cluster version " + clusterVersion + " using fallback")
|
||||
this.kubectlVersion = versionParts[1] + versionParts[2];
|
||||
logger.debug("Set kubectl version " + this.kubectlVersion + " for cluster version " + clusterVersion + " using fallback");
|
||||
}
|
||||
|
||||
let arch = null
|
||||
let arch = null;
|
||||
|
||||
if (process.arch == "x64") {
|
||||
arch = "amd64"
|
||||
arch = "amd64";
|
||||
} else if (process.arch == "x86" || process.arch == "ia32") {
|
||||
arch = "386"
|
||||
arch = "386";
|
||||
} else {
|
||||
arch = process.arch
|
||||
arch = process.arch;
|
||||
}
|
||||
|
||||
const platformName = isWindows ? "windows" : process.platform
|
||||
const binaryName = isWindows ? "kubectl.exe" : "kubectl"
|
||||
const platformName = isWindows ? "windows" : process.platform;
|
||||
const binaryName = isWindows ? "kubectl.exe" : "kubectl";
|
||||
|
||||
this.url = `${this.getDownloadMirror()}/v${this.kubectlVersion}/bin/${platformName}/${arch}/${binaryName}`
|
||||
this.url = `${this.getDownloadMirror()}/v${this.kubectlVersion}/bin/${platformName}/${arch}/${binaryName}`;
|
||||
|
||||
this.dirname = path.normalize(path.join(this.getDownloadDir(), this.kubectlVersion))
|
||||
this.path = path.join(this.dirname, binaryName)
|
||||
this.dirname = path.normalize(path.join(this.getDownloadDir(), this.kubectlVersion));
|
||||
this.path = path.join(this.dirname, binaryName);
|
||||
}
|
||||
|
||||
public getBundledPath() {
|
||||
return bundledKubectlPath()
|
||||
return bundledKubectlPath();
|
||||
}
|
||||
|
||||
public getPathFromPreferences() {
|
||||
return userStore.preferences?.kubectlBinariesPath || this.getBundledPath()
|
||||
return userStore.preferences?.kubectlBinariesPath || this.getBundledPath();
|
||||
}
|
||||
|
||||
protected getDownloadDir() {
|
||||
if (userStore.preferences?.downloadBinariesPath) {
|
||||
return path.join(userStore.preferences.downloadBinariesPath, "kubectl")
|
||||
return path.join(userStore.preferences.downloadBinariesPath, "kubectl");
|
||||
}
|
||||
|
||||
return Kubectl.kubectlDir
|
||||
return Kubectl.kubectlDir;
|
||||
}
|
||||
|
||||
public async getPath(bundled = false): Promise<string> {
|
||||
if (userStore.preferences?.downloadKubectlBinaries === false) {
|
||||
return this.getPathFromPreferences()
|
||||
return this.getPathFromPreferences();
|
||||
}
|
||||
|
||||
// return binary name if bundled path is not functional
|
||||
if (!await this.checkBinary(this.getBundledPath(), false)) {
|
||||
Kubectl.invalidBundle = true
|
||||
return path.basename(this.getBundledPath())
|
||||
Kubectl.invalidBundle = true;
|
||||
return path.basename(this.getBundledPath());
|
||||
}
|
||||
|
||||
try {
|
||||
if (!await this.ensureKubectl()) {
|
||||
logger.error("Failed to ensure kubectl, fallback to the bundled version")
|
||||
return this.getBundledPath()
|
||||
logger.error("Failed to ensure kubectl, fallback to the bundled version");
|
||||
return this.getBundledPath();
|
||||
}
|
||||
return this.path
|
||||
return this.path;
|
||||
} catch (err) {
|
||||
logger.error("Failed to ensure kubectl, fallback to the bundled version")
|
||||
logger.error(err)
|
||||
return this.getBundledPath()
|
||||
logger.error("Failed to ensure kubectl, fallback to the bundled version");
|
||||
logger.error(err);
|
||||
return this.getBundledPath();
|
||||
}
|
||||
}
|
||||
|
||||
public async binDir() {
|
||||
try {
|
||||
await this.ensureKubectl()
|
||||
await this.writeInitScripts()
|
||||
return this.dirname
|
||||
await this.ensureKubectl();
|
||||
await this.writeInitScripts();
|
||||
return this.dirname;
|
||||
} catch (err) {
|
||||
logger.error(err)
|
||||
return ""
|
||||
logger.error(err);
|
||||
return "";
|
||||
}
|
||||
}
|
||||
|
||||
public async checkBinary(path: string, checkVersion = true) {
|
||||
const exists = await pathExists(path)
|
||||
const exists = await pathExists(path);
|
||||
if (exists) {
|
||||
try {
|
||||
const { stdout } = await promiseExec(`"${path}" version --client=true -o json`)
|
||||
const output = JSON.parse(stdout)
|
||||
const { stdout } = await promiseExec(`"${path}" version --client=true -o json`);
|
||||
const output = JSON.parse(stdout);
|
||||
if (!checkVersion) {
|
||||
return true
|
||||
return true;
|
||||
}
|
||||
let version: string = output.clientVersion.gitVersion
|
||||
let version: string = output.clientVersion.gitVersion;
|
||||
if (version[0] === 'v') {
|
||||
version = version.slice(1)
|
||||
version = version.slice(1);
|
||||
}
|
||||
if (version === this.kubectlVersion) {
|
||||
logger.debug(`Local kubectl is version ${this.kubectlVersion}`)
|
||||
return true
|
||||
logger.debug(`Local kubectl is version ${this.kubectlVersion}`);
|
||||
return true;
|
||||
}
|
||||
logger.error(`Local kubectl is version ${version}, expected ${this.kubectlVersion}, unlinking`)
|
||||
logger.error(`Local kubectl is version ${version}, expected ${this.kubectlVersion}, unlinking`);
|
||||
} catch (err) {
|
||||
logger.error(`Local kubectl failed to run properly (${err.message}), unlinking`)
|
||||
logger.error(`Local kubectl failed to run properly (${err.message}), unlinking`);
|
||||
}
|
||||
await fs.promises.unlink(this.path)
|
||||
await fs.promises.unlink(this.path);
|
||||
}
|
||||
return false
|
||||
return false;
|
||||
}
|
||||
|
||||
protected async checkBundled(): Promise<boolean> {
|
||||
if (this.kubectlVersion === Kubectl.bundledKubectlVersion) {
|
||||
try {
|
||||
const exist = await pathExists(this.path)
|
||||
const exist = await pathExists(this.path);
|
||||
if (!exist) {
|
||||
await fs.promises.copyFile(this.getBundledPath(), this.path)
|
||||
await fs.promises.chmod(this.path, 0o755)
|
||||
await fs.promises.copyFile(this.getBundledPath(), this.path);
|
||||
await fs.promises.chmod(this.path, 0o755);
|
||||
}
|
||||
return true
|
||||
return true;
|
||||
} catch (err) {
|
||||
logger.error("Could not copy the bundled kubectl to app-data: " + err)
|
||||
return false
|
||||
logger.error("Could not copy the bundled kubectl to app-data: " + err);
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
return false
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public async ensureKubectl(): Promise<boolean> {
|
||||
if (userStore.preferences?.downloadKubectlBinaries === false) {
|
||||
return true
|
||||
return true;
|
||||
}
|
||||
if (Kubectl.invalidBundle) {
|
||||
logger.error(`Detected invalid bundle binary, returning ...`)
|
||||
return false
|
||||
logger.error(`Detected invalid bundle binary, returning ...`);
|
||||
return false;
|
||||
}
|
||||
await ensureDir(this.dirname, 0o755)
|
||||
await ensureDir(this.dirname, 0o755);
|
||||
return lockFile.lock(this.dirname).then(async (release) => {
|
||||
logger.debug(`Acquired a lock for ${this.kubectlVersion}`)
|
||||
const bundled = await this.checkBundled()
|
||||
let isValid = await this.checkBinary(this.path, !bundled)
|
||||
logger.debug(`Acquired a lock for ${this.kubectlVersion}`);
|
||||
const bundled = await this.checkBundled();
|
||||
let isValid = await this.checkBinary(this.path, !bundled);
|
||||
if (!isValid && !bundled) {
|
||||
await this.downloadKubectl().catch((error) => {
|
||||
logger.error(error)
|
||||
logger.debug(`Releasing lock for ${this.kubectlVersion}`)
|
||||
release()
|
||||
return false
|
||||
logger.error(error);
|
||||
logger.debug(`Releasing lock for ${this.kubectlVersion}`);
|
||||
release();
|
||||
return false;
|
||||
});
|
||||
isValid = !await this.checkBinary(this.path, false)
|
||||
isValid = !await this.checkBinary(this.path, false);
|
||||
}
|
||||
if (!isValid) {
|
||||
logger.debug(`Releasing lock for ${this.kubectlVersion}`)
|
||||
release()
|
||||
return false
|
||||
logger.debug(`Releasing lock for ${this.kubectlVersion}`);
|
||||
release();
|
||||
return false;
|
||||
}
|
||||
logger.debug(`Releasing lock for ${this.kubectlVersion}`)
|
||||
release()
|
||||
return true
|
||||
logger.debug(`Releasing lock for ${this.kubectlVersion}`);
|
||||
release();
|
||||
return true;
|
||||
}).catch((e) => {
|
||||
logger.error(`Failed to get a lock for ${this.kubectlVersion}`)
|
||||
logger.error(e)
|
||||
return false
|
||||
})
|
||||
logger.error(`Failed to get a lock for ${this.kubectlVersion}`);
|
||||
logger.error(e);
|
||||
return false;
|
||||
});
|
||||
}
|
||||
|
||||
public async downloadKubectl() {
|
||||
await ensureDir(path.dirname(this.path), 0o755)
|
||||
await ensureDir(path.dirname(this.path), 0o755);
|
||||
|
||||
logger.info(`Downloading kubectl ${this.kubectlVersion} from ${this.url} to ${this.path}`)
|
||||
logger.info(`Downloading kubectl ${this.kubectlVersion} from ${this.url} to ${this.path}`);
|
||||
return new Promise((resolve, reject) => {
|
||||
const stream = customRequest({
|
||||
url: this.url,
|
||||
gzip: true,
|
||||
});
|
||||
const file = fs.createWriteStream(this.path)
|
||||
const file = fs.createWriteStream(this.path);
|
||||
stream.on("complete", () => {
|
||||
logger.debug("kubectl binary download finished")
|
||||
file.end()
|
||||
})
|
||||
logger.debug("kubectl binary download finished");
|
||||
file.end();
|
||||
});
|
||||
stream.on("error", (error) => {
|
||||
logger.error(error)
|
||||
logger.error(error);
|
||||
fs.unlink(this.path, () => {
|
||||
// do nothing
|
||||
})
|
||||
reject(error)
|
||||
})
|
||||
});
|
||||
reject(error);
|
||||
});
|
||||
file.on("close", () => {
|
||||
logger.debug("kubectl binary download closed")
|
||||
logger.debug("kubectl binary download closed");
|
||||
fs.chmod(this.path, 0o755, (err) => {
|
||||
if (err) reject(err);
|
||||
})
|
||||
resolve()
|
||||
})
|
||||
stream.pipe(file)
|
||||
})
|
||||
});
|
||||
resolve();
|
||||
});
|
||||
stream.pipe(file);
|
||||
});
|
||||
}
|
||||
|
||||
protected async writeInitScripts() {
|
||||
const kubectlPath = userStore.preferences?.downloadKubectlBinaries ? this.dirname : path.dirname(this.getPathFromPreferences())
|
||||
const helmPath = helmCli.getBinaryDir()
|
||||
const kubectlPath = userStore.preferences?.downloadKubectlBinaries ? this.dirname : path.dirname(this.getPathFromPreferences());
|
||||
const helmPath = helmCli.getBinaryDir();
|
||||
const fsPromises = fs.promises;
|
||||
const bashScriptPath = path.join(this.dirname, '.bash_set_path')
|
||||
const bashScriptPath = path.join(this.dirname, '.bash_set_path');
|
||||
|
||||
let bashScript = "" + initScriptVersionString
|
||||
bashScript += "tempkubeconfig=\"$KUBECONFIG\"\n"
|
||||
bashScript += "test -f \"/etc/profile\" && . \"/etc/profile\"\n"
|
||||
bashScript += "if test -f \"$HOME/.bash_profile\"; then\n"
|
||||
bashScript += " . \"$HOME/.bash_profile\"\n"
|
||||
bashScript += "elif test -f \"$HOME/.bash_login\"; then\n"
|
||||
bashScript += " . \"$HOME/.bash_login\"\n"
|
||||
bashScript += "elif test -f \"$HOME/.profile\"; then\n"
|
||||
bashScript += " . \"$HOME/.profile\"\n"
|
||||
bashScript += "fi\n"
|
||||
bashScript += `export PATH="${helmPath}:${kubectlPath}:$PATH"\n`
|
||||
bashScript += "export KUBECONFIG=\"$tempkubeconfig\"\n"
|
||||
let bashScript = "" + initScriptVersionString;
|
||||
bashScript += "tempkubeconfig=\"$KUBECONFIG\"\n";
|
||||
bashScript += "test -f \"/etc/profile\" && . \"/etc/profile\"\n";
|
||||
bashScript += "if test -f \"$HOME/.bash_profile\"; then\n";
|
||||
bashScript += " . \"$HOME/.bash_profile\"\n";
|
||||
bashScript += "elif test -f \"$HOME/.bash_login\"; then\n";
|
||||
bashScript += " . \"$HOME/.bash_login\"\n";
|
||||
bashScript += "elif test -f \"$HOME/.profile\"; then\n";
|
||||
bashScript += " . \"$HOME/.profile\"\n";
|
||||
bashScript += "fi\n";
|
||||
bashScript += `export PATH="${helmPath}:${kubectlPath}:$PATH"\n`;
|
||||
bashScript += "export KUBECONFIG=\"$tempkubeconfig\"\n";
|
||||
|
||||
bashScript += "NO_PROXY=\",${NO_PROXY:-localhost},\"\n"
|
||||
bashScript += "NO_PROXY=\"${NO_PROXY//,localhost,/,}\"\n"
bashScript += "NO_PROXY=\"${NO_PROXY//,127.0.0.1,/,}\"\n"
bashScript += "NO_PROXY=\"localhost,127.0.0.1${NO_PROXY%,}\"\n"
bashScript += "export NO_PROXY\n"
bashScript += "unset tempkubeconfig\n"
await fsPromises.writeFile(bashScriptPath, bashScript.toString(), { mode: 0o644 })
bashScript += "NO_PROXY=\",${NO_PROXY:-localhost},\"\n";
bashScript += "NO_PROXY=\"${NO_PROXY//,localhost,/,}\"\n";
bashScript += "NO_PROXY=\"${NO_PROXY//,127.0.0.1,/,}\"\n";
bashScript += "NO_PROXY=\"localhost,127.0.0.1${NO_PROXY%,}\"\n";
bashScript += "export NO_PROXY\n";
bashScript += "unset tempkubeconfig\n";
await fsPromises.writeFile(bashScriptPath, bashScript.toString(), { mode: 0o644 });

const zshScriptPath = path.join(this.dirname, '.zlogin')
const zshScriptPath = path.join(this.dirname, '.zlogin');

let zshScript = "" + initScriptVersionString
let zshScript = "" + initScriptVersionString;

zshScript += "tempkubeconfig=\"$KUBECONFIG\"\n"
zshScript += "tempkubeconfig=\"$KUBECONFIG\"\n";
// restore previous ZDOTDIR
zshScript += "export ZDOTDIR=\"$OLD_ZDOTDIR\"\n"
zshScript += "export ZDOTDIR=\"$OLD_ZDOTDIR\"\n";
// source all the files
zshScript += "test -f \"$OLD_ZDOTDIR/.zshenv\" && . \"$OLD_ZDOTDIR/.zshenv\"\n"
zshScript += "test -f \"$OLD_ZDOTDIR/.zprofile\" && . \"$OLD_ZDOTDIR/.zprofile\"\n"
zshScript += "test -f \"$OLD_ZDOTDIR/.zlogin\" && . \"$OLD_ZDOTDIR/.zlogin\"\n"
zshScript += "test -f \"$OLD_ZDOTDIR/.zshrc\" && . \"$OLD_ZDOTDIR/.zshrc\"\n"
zshScript += "test -f \"$OLD_ZDOTDIR/.zshenv\" && . \"$OLD_ZDOTDIR/.zshenv\"\n";
zshScript += "test -f \"$OLD_ZDOTDIR/.zprofile\" && . \"$OLD_ZDOTDIR/.zprofile\"\n";
zshScript += "test -f \"$OLD_ZDOTDIR/.zlogin\" && . \"$OLD_ZDOTDIR/.zlogin\"\n";
zshScript += "test -f \"$OLD_ZDOTDIR/.zshrc\" && . \"$OLD_ZDOTDIR/.zshrc\"\n";

// voodoo to replace any previous occurrences of kubectl path in the PATH
zshScript += `kubectlpath=\"${kubectlPath}"\n`
zshScript += `helmpath=\"${helmPath}"\n`
zshScript += "p=\":$kubectlpath:\"\n"
zshScript += "d=\":$PATH:\"\n"
zshScript += "d=${d//$p/:}\n"
zshScript += "d=${d/#:/}\n"
zshScript += "export PATH=\"$helmpath:$kubectlpath:${d/%:/}\"\n"
zshScript += "export KUBECONFIG=\"$tempkubeconfig\"\n"
zshScript += "NO_PROXY=\",${NO_PROXY:-localhost},\"\n"
zshScript += "NO_PROXY=\"${NO_PROXY//,localhost,/,}\"\n"
zshScript += "NO_PROXY=\"${NO_PROXY//,127.0.0.1,/,}\"\n"
zshScript += "NO_PROXY=\"localhost,127.0.0.1${NO_PROXY%,}\"\n"
zshScript += "export NO_PROXY\n"
zshScript += "unset tempkubeconfig\n"
zshScript += "unset OLD_ZDOTDIR\n"
await fsPromises.writeFile(zshScriptPath, zshScript.toString(), { mode: 0o644 })
zshScript += `kubectlpath=\"${kubectlPath}"\n`;
zshScript += `helmpath=\"${helmPath}"\n`;
zshScript += "p=\":$kubectlpath:\"\n";
zshScript += "d=\":$PATH:\"\n";
zshScript += "d=${d//$p/:}\n";
zshScript += "d=${d/#:/}\n";
zshScript += "export PATH=\"$helmpath:$kubectlpath:${d/%:/}\"\n";
zshScript += "export KUBECONFIG=\"$tempkubeconfig\"\n";
zshScript += "NO_PROXY=\",${NO_PROXY:-localhost},\"\n";
zshScript += "NO_PROXY=\"${NO_PROXY//,localhost,/,}\"\n";
zshScript += "NO_PROXY=\"${NO_PROXY//,127.0.0.1,/,}\"\n";
zshScript += "NO_PROXY=\"localhost,127.0.0.1${NO_PROXY%,}\"\n";
zshScript += "export NO_PROXY\n";
zshScript += "unset tempkubeconfig\n";
zshScript += "unset OLD_ZDOTDIR\n";
await fsPromises.writeFile(zshScriptPath, zshScript.toString(), { mode: 0o644 });
}

protected getDownloadMirror() {
const mirror = packageMirrors.get(userStore.preferences?.downloadMirror)
const mirror = packageMirrors.get(userStore.preferences?.downloadMirror);
if (mirror) {
return mirror
return mirror;
}
return packageMirrors.get("default") // MacOS packages are only available from default
return packageMirrors.get("default"); // MacOS packages are only available from default
}
}
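A note on the PATH handling in the generated zsh snippet above: the current PATH is wrapped in colons, every ":$kubectlpath:" segment is collapsed back to a single ":", the helper colons are trimmed, and helm/kubectl are then prepended once. A minimal TypeScript sketch of the same dedup step (an illustrative helper, not part of this commit) could look like:

// Drop existing occurrences of `entry` from a colon-separated PATH string,
// then prepend it once – mirroring the `${d//$p/:}` / `${d/#:/}` / `${d/%:/}` expansions above.
function prependToPath(path: string, entry: string): string {
  const scrubbed = `:${path}:`
    .split(`:${entry}:`)   // remove matching segments
    .join(":")
    .replace(/^:/, "")     // trim the helper colons again
    .replace(/:$/, "");
  return scrubbed ? `${entry}:${scrubbed}` : entry;
}

// prependToPath("/usr/bin:/opt/kubectl:/bin", "/opt/kubectl") === "/opt/kubectl:/usr/bin:/bin"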

@ -1,5 +1,5 @@
import packageInfo from "../../package.json"
import path from "path"
import packageInfo from "../../package.json";
import path from "path";
import { Kubectl } from "../../src/main/kubectl";
import { isWindows } from "../common/vars";

@ -7,39 +7,39 @@ jest.mock("../common/user-store");

describe("kubectlVersion", () => {
it("returns bundled version if exactly same version used", async () => {
const kubectl = new Kubectl(Kubectl.bundled().kubectlVersion)
expect(kubectl.kubectlVersion).toBe(Kubectl.bundled().kubectlVersion)
})
const kubectl = new Kubectl(Kubectl.bundled().kubectlVersion);
expect(kubectl.kubectlVersion).toBe(Kubectl.bundled().kubectlVersion);
});

it("returns bundled version if same major.minor version is used", async () => {
const { bundledKubectlVersion } = packageInfo.config;
const kubectl = new Kubectl(bundledKubectlVersion);
expect(kubectl.kubectlVersion).toBe(Kubectl.bundled().kubectlVersion)
})
})
expect(kubectl.kubectlVersion).toBe(Kubectl.bundled().kubectlVersion);
});
});

describe("getPath()", () => {
it("returns path to downloaded kubectl binary", async () => {
const { bundledKubectlVersion } = packageInfo.config;
const kubectl = new Kubectl(bundledKubectlVersion);
const kubectlPath = await kubectl.getPath()
let binaryName = "kubectl"
const kubectlPath = await kubectl.getPath();
let binaryName = "kubectl";
if (isWindows) {
binaryName += ".exe"
binaryName += ".exe";
}
const expectedPath = path.join(Kubectl.kubectlDir, Kubectl.bundledKubectlVersion, binaryName)
expect(kubectlPath).toBe(expectedPath)
})
const expectedPath = path.join(Kubectl.kubectlDir, Kubectl.bundledKubectlVersion, binaryName);
expect(kubectlPath).toBe(expectedPath);
});

it("returns plain binary name if bundled kubectl is non-functional", async () => {
const { bundledKubectlVersion } = packageInfo.config;
const kubectl = new Kubectl(bundledKubectlVersion);
jest.spyOn(kubectl, "getBundledPath").mockReturnValue("/invalid/path/kubectl")
const kubectlPath = await kubectl.getPath()
let binaryName = "kubectl"
jest.spyOn(kubectl, "getBundledPath").mockReturnValue("/invalid/path/kubectl");
const kubectlPath = await kubectl.getPath();
let binaryName = "kubectl";
if (isWindows) {
binaryName += ".exe"
binaryName += ".exe";
}
expect(kubectlPath).toBe(binaryName)
})
})
expect(kubectlPath).toBe(binaryName);
});
});

@ -2,16 +2,16 @@ import http from "http";

export abstract class LensApi {
protected respondJson(res: http.ServerResponse, content: {}, status = 200) {
this.respond(res, JSON.stringify(content), "application/json", status)
this.respond(res, JSON.stringify(content), "application/json", status);
}

protected respondText(res: http.ServerResponse, content: string, status = 200) {
this.respond(res, content, "text/plain", status)
this.respond(res, content, "text/plain", status);
}

protected respond(res: http.ServerResponse, content: string, contentType: string, status = 200) {
res.setHeader("Content-Type", contentType)
res.statusCode = status
res.end(content)
res.setHeader("Content-Type", contentType);
res.statusCode = status;
res.end(content);
}
}

@ -1,10 +1,10 @@
import path from "path"
import fs from "fs"
import request from "request"
import { ensureDir, pathExists } from "fs-extra"
import * as tar from "tar"
import path from "path";
import fs from "fs";
import request from "request";
import { ensureDir, pathExists } from "fs-extra";
import * as tar from "tar";
import { isWindows } from "../common/vars";
import winston from "winston"
import winston from "winston";

export type LensBinaryOpts = {
version: string;
@ -30,159 +30,159 @@ export class LensBinary {
protected logger: Console | winston.Logger

constructor(opts: LensBinaryOpts) {
const baseDir = opts.baseDir
this.originalBinaryName = opts.originalBinaryName
this.binaryName = opts.newBinaryName || opts.originalBinaryName
this.binaryVersion = opts.version
this.requestOpts = opts.requestOpts
this.logger = console
let arch = null
const baseDir = opts.baseDir;
this.originalBinaryName = opts.originalBinaryName;
this.binaryName = opts.newBinaryName || opts.originalBinaryName;
this.binaryVersion = opts.version;
this.requestOpts = opts.requestOpts;
this.logger = console;
let arch = null;

if (process.arch == "x64") {
arch = "amd64"
arch = "amd64";
}
else if (process.arch == "x86" || process.arch == "ia32") {
arch = "386"
arch = "386";
}
else {
arch = process.arch
arch = process.arch;
}
this.arch = arch
this.platformName = isWindows ? "windows" : process.platform
this.dirname = path.normalize(path.join(baseDir, this.binaryName))
this.arch = arch;
this.platformName = isWindows ? "windows" : process.platform;
this.dirname = path.normalize(path.join(baseDir, this.binaryName));
if (isWindows) {
this.binaryName = this.binaryName + ".exe"
this.originalBinaryName = this.originalBinaryName + ".exe"
this.binaryName = this.binaryName + ".exe";
this.originalBinaryName = this.originalBinaryName + ".exe";
}
const tarName = this.getTarName()
const tarName = this.getTarName();
if (tarName) {
this.tarPath = path.join(this.dirname, tarName)
this.tarPath = path.join(this.dirname, tarName);
}
}

public setLogger(logger: Console | winston.Logger) {
this.logger = logger
this.logger = logger;
}

protected binaryDir() {
throw new Error("binaryDir not implemented")
throw new Error("binaryDir not implemented");
}

public async binaryPath() {
await this.ensureBinary()
return this.getBinaryPath()
await this.ensureBinary();
return this.getBinaryPath();
}

protected getTarName(): string | null {
return null
return null;
}

protected getUrl() {
return ""
return "";
}

protected getBinaryPath() {
return ""
return "";
}

protected getOriginalBinaryPath() {
return ""
return "";
}

public getBinaryDir() {
return path.dirname(this.getBinaryPath())
return path.dirname(this.getBinaryPath());
}

public async binDir() {
try {
await this.ensureBinary()
return this.dirname
await this.ensureBinary();
return this.dirname;
} catch (err) {
this.logger.error(err)
return ""
this.logger.error(err);
return "";
}
}

protected async checkBinary() {
const exists = await pathExists(this.getBinaryPath())
return exists
const exists = await pathExists(this.getBinaryPath());
return exists;
}

public async ensureBinary() {
const isValid = await this.checkBinary()
const isValid = await this.checkBinary();
if (!isValid) {
await this.downloadBinary().catch((error) => {
this.logger.error(error)
this.logger.error(error);
});
if (this.tarPath) await this.untarBinary()
if (this.originalBinaryName != this.binaryName) await this.renameBinary()
this.logger.info(`${this.originalBinaryName} has been downloaded to ${this.getBinaryPath()}`)
if (this.tarPath) await this.untarBinary();
if (this.originalBinaryName != this.binaryName) await this.renameBinary();
this.logger.info(`${this.originalBinaryName} has been downloaded to ${this.getBinaryPath()}`);
}
}

protected async untarBinary() {
return new Promise<void>((resolve, reject) => {
this.logger.debug(`Extracting ${this.originalBinaryName} binary`)
this.logger.debug(`Extracting ${this.originalBinaryName} binary`);
tar.x({
file: this.tarPath,
cwd: this.dirname
}).then((_ => {
resolve()
}))
})
resolve();
}));
});
}

protected async renameBinary() {
return new Promise<void>((resolve, reject) => {
this.logger.debug(`Renaming ${this.originalBinaryName} binary to ${this.binaryName}`)
this.logger.debug(`Renaming ${this.originalBinaryName} binary to ${this.binaryName}`);
fs.rename(this.getOriginalBinaryPath(), this.getBinaryPath(), (err) => {
if (err) {
reject(err)
reject(err);
}
else {
resolve()
resolve();
}
})
})
});
});
}

protected async downloadBinary() {
const binaryPath = this.tarPath || this.getBinaryPath()
await ensureDir(this.getBinaryDir(), 0o755)
const binaryPath = this.tarPath || this.getBinaryPath();
await ensureDir(this.getBinaryDir(), 0o755);

const file = fs.createWriteStream(binaryPath)
const url = this.getUrl()
const file = fs.createWriteStream(binaryPath);
const url = this.getUrl();

this.logger.info(`Downloading ${this.originalBinaryName} ${this.binaryVersion} from ${url} to ${binaryPath}`)
this.logger.info(`Downloading ${this.originalBinaryName} ${this.binaryVersion} from ${url} to ${binaryPath}`);
const requestOpts: request.UriOptions & request.CoreOptions = {
uri: url,
gzip: true,
...this.requestOpts
}
};

const stream = request(requestOpts)
const stream = request(requestOpts);

stream.on("complete", () => {
this.logger.info(`Download of ${this.originalBinaryName} finished`)
file.end()
})
this.logger.info(`Download of ${this.originalBinaryName} finished`);
file.end();
});

stream.on("error", (error) => {
this.logger.error(error)
this.logger.error(error);
fs.unlink(binaryPath, () => {
// do nothing
})
throw(error)
})
});
throw(error);
});
return new Promise((resolve, reject) => {
file.on("close", () => {
this.logger.debug(`${this.originalBinaryName} binary download closed`)
this.logger.debug(`${this.originalBinaryName} binary download closed`);
if (!this.tarPath) fs.chmod(binaryPath, 0o755, (err) => {
if (err) reject(err);
})
resolve()
})
stream.pipe(file)
})
});
resolve();
});
stream.pipe(file);
});
}
}
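downloadBinary() above pipes the HTTP response stream into a write stream and only resolves once the file's "close" event fires, so ensureBinary() can await the complete download before untarring or renaming. A condensed TypeScript sketch of that stream-to-file pattern (hypothetical helper names, using Node's built-in https module rather than the request package used by the class):

import fs from "fs";
import https from "https";

// Download `url` to `dest` and resolve once the data is flushed to disk.
function downloadTo(url: string, dest: string): Promise<void> {
  return new Promise((resolve, reject) => {
    const file = fs.createWriteStream(dest);
    https.get(url, (res) => {
      res.pipe(file);                        // stream the body straight into the file
      file.on("close", () => resolve());     // "close" fires after the write stream ends
      res.on("error", (err) => {
        file.destroy();
        fs.unlink(dest, () => reject(err));  // drop the partial download
      });
    }).on("error", reject);
  });
}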

@ -3,13 +3,13 @@ import http from "http";
import spdy from "spdy";
import httpProxy from "http-proxy";
import url from "url";
import * as WebSocket from "ws"
import { apiPrefix, apiKubePrefix } from "../common/vars"
import * as WebSocket from "ws";
import { apiPrefix, apiKubePrefix } from "../common/vars";
import { openShell } from "./node-shell-session";
import { Router } from "./router"
import { ClusterManager } from "./cluster-manager"
import { Router } from "./router";
import { ClusterManager } from "./cluster-manager";
import { ContextHandler } from "./context-handler";
import logger from "./logger"
import logger from "./logger";

export class LensProxy {
protected origin: string
@ -23,7 +23,7 @@ export class LensProxy {
}

private constructor(protected port: number, protected clusterManager: ClusterManager) {
this.origin = `http://localhost:${port}`
this.origin = `http://localhost:${port}`;
this.router = new Router();
}

@ -35,8 +35,8 @@ export class LensProxy {

close() {
logger.info("Closing proxy server");
this.proxyServer.close()
this.closed = true
this.proxyServer.close();
this.closed = true;
}

protected buildCustomProxy(): http.Server {
@ -47,66 +47,66 @@ export class LensProxy {
protocols: ["http/1.1", "spdy/3.1"]
}
}, (req: http.IncomingMessage, res: http.ServerResponse) => {
this.handleRequest(proxy, req, res)
})
this.handleRequest(proxy, req, res);
});
spdyProxy.on("upgrade", (req: http.IncomingMessage, socket: net.Socket, head: Buffer) => {
if (req.url.startsWith(`${apiPrefix}?`)) {
this.handleWsUpgrade(req, socket, head)
this.handleWsUpgrade(req, socket, head);
} else {
this.handleProxyUpgrade(proxy, req, socket, head)
this.handleProxyUpgrade(proxy, req, socket, head);
}
})
});
spdyProxy.on("error", (err) => {
logger.error("proxy error", err)
})
return spdyProxy
logger.error("proxy error", err);
});
return spdyProxy;
}

protected async handleProxyUpgrade(proxy: httpProxy, req: http.IncomingMessage, socket: net.Socket, head: Buffer) {
const cluster = this.clusterManager.getClusterForRequest(req)
const cluster = this.clusterManager.getClusterForRequest(req);
if (cluster) {
const proxyUrl = await cluster.contextHandler.resolveAuthProxyUrl() + req.url.replace(apiKubePrefix, "")
const apiUrl = url.parse(cluster.apiUrl)
const pUrl = url.parse(proxyUrl)
const connectOpts = { port: parseInt(pUrl.port), host: pUrl.hostname }
const proxySocket = new net.Socket()
const proxyUrl = await cluster.contextHandler.resolveAuthProxyUrl() + req.url.replace(apiKubePrefix, "");
const apiUrl = url.parse(cluster.apiUrl);
const pUrl = url.parse(proxyUrl);
const connectOpts = { port: parseInt(pUrl.port), host: pUrl.hostname };
const proxySocket = new net.Socket();
proxySocket.connect(connectOpts, () => {
proxySocket.write(`${req.method} ${pUrl.path} HTTP/1.1\r\n`)
proxySocket.write(`Host: ${apiUrl.host}\r\n`)
proxySocket.write(`${req.method} ${pUrl.path} HTTP/1.1\r\n`);
proxySocket.write(`Host: ${apiUrl.host}\r\n`);
for (let i = 0; i < req.rawHeaders.length; i += 2) {
const key = req.rawHeaders[i]
const key = req.rawHeaders[i];
if (key !== "Host" && key !== "Authorization") {
proxySocket.write(`${req.rawHeaders[i]}: ${req.rawHeaders[i+1]}\r\n`)
proxySocket.write(`${req.rawHeaders[i]}: ${req.rawHeaders[i+1]}\r\n`);
}
}
proxySocket.write("\r\n")
proxySocket.write(head)
})
proxySocket.write("\r\n");
proxySocket.write(head);
});

proxySocket.setKeepAlive(true)
socket.setKeepAlive(true)
proxySocket.setTimeout(0)
socket.setTimeout(0)
proxySocket.setKeepAlive(true);
socket.setKeepAlive(true);
proxySocket.setTimeout(0);
socket.setTimeout(0);

proxySocket.on('data', function (chunk) {
socket.write(chunk)
})
socket.write(chunk);
});
proxySocket.on('end', function () {
socket.end()
})
socket.end();
});
proxySocket.on('error', function (err) {
socket.write("HTTP/" + req.httpVersion + " 500 Connection error\r\n\r\n");
socket.end()
})
socket.end();
});
socket.on('data', function (chunk) {
proxySocket.write(chunk)
})
proxySocket.write(chunk);
});
socket.on('end', function () {
proxySocket.end()
})
proxySocket.end();
});
socket.on('error', function () {
proxySocket.end()
})
proxySocket.end();
});
}
}

@ -120,29 +120,29 @@ export class LensProxy {
logger.debug("Failed proxy to target: " + JSON.stringify(target, null, 2));
if (req.method === "GET" && (!res.statusCode || res.statusCode >= 500)) {
const reqId = this.getRequestId(req);
const retryCount = this.retryCounters.get(reqId) || 0
const timeoutMs = retryCount * 250
const retryCount = this.retryCounters.get(reqId) || 0;
const timeoutMs = retryCount * 250;
if (retryCount < 20) {
logger.debug(`Retrying proxy request to url: ${reqId}`)
logger.debug(`Retrying proxy request to url: ${reqId}`);
setTimeout(() => {
this.retryCounters.set(reqId, retryCount + 1)
this.handleRequest(proxy, req, res)
}, timeoutMs)
this.retryCounters.set(reqId, retryCount + 1);
this.handleRequest(proxy, req, res);
}, timeoutMs);
}
}
}
try {
res.writeHead(500).end("Oops, something went wrong.")
res.writeHead(500).end("Oops, something went wrong.");
} catch (e) {
logger.error(`[LENS-PROXY]: Failed to write headers: `, e)
logger.error(`[LENS-PROXY]: Failed to write headers: `, e);
}
})
});

return proxy;
}

protected createWsListener(): WebSocket.Server {
const ws = new WebSocket.Server({ noServer: true })
const ws = new WebSocket.Server({ noServer: true });
return ws.on("connection", ((socket: WebSocket, req: http.IncomingMessage) => {
const cluster = this.clusterManager.getClusterForRequest(req);
const nodeParam = url.parse(req.url, true).query["node"]?.toString();
@ -152,10 +152,10 @@ export class LensProxy {

protected async getProxyTarget(req: http.IncomingMessage, contextHandler: ContextHandler): Promise<httpProxy.ServerOptions> {
if (req.url.startsWith(apiKubePrefix)) {
delete req.headers.authorization
req.url = req.url.replace(apiKubePrefix, "")
const isWatchRequest = req.url.includes("watch=")
return await contextHandler.getApiTarget(isWatchRequest)
delete req.headers.authorization;
req.url = req.url.replace(apiKubePrefix, "");
const isWatchRequest = req.url.includes("watch=");
return await contextHandler.getApiTarget(isWatchRequest);
}
}

@ -164,9 +164,9 @@ export class LensProxy {
}

protected async handleRequest(proxy: httpProxy, req: http.IncomingMessage, res: http.ServerResponse) {
const cluster = this.clusterManager.getClusterForRequest(req)
const cluster = this.clusterManager.getClusterForRequest(req);
if (cluster) {
const proxyTarget = await this.getProxyTarget(req, cluster.contextHandler)
const proxyTarget = await this.getProxyTarget(req, cluster.contextHandler);
if (proxyTarget) {
// allow to fetch apis in "clusterId.localhost:port" from "localhost:port"
res.setHeader("Access-Control-Allow-Origin", this.origin);
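The proxy-error branch above re-issues failed GET requests with a linearly growing delay (retryCount * 250 ms, capped at 20 attempts). The same back-off loop in isolation, as a hedged TypeScript sketch rather than the class's actual helper:

// Retry `fn` up to `maxAttempts` times, waiting attempt * stepMs between tries
// (0 ms, 250 ms, 500 ms, ... with the defaults matching the code above).
async function retryLinear<T>(
  fn: () => Promise<T>,
  maxAttempts = 20,
  stepMs = 250,
): Promise<T> {
  for (let attempt = 0; ; attempt++) {
    try {
      return await fn();
    } catch (err) {
      if (attempt + 1 >= maxAttempts) throw err;
      await new Promise((resolve) => setTimeout(resolve, attempt * stepMs));
    }
  }
}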

@ -1,13 +1,13 @@
import { app, remote } from "electron";
import winston from "winston"
import winston from "winston";
import { isDebugging } from "../common/vars";

const logLevel = process.env.LOG_LEVEL ? process.env.LOG_LEVEL : isDebugging ? "debug" : "info"
const logLevel = process.env.LOG_LEVEL ? process.env.LOG_LEVEL : isDebugging ? "debug" : "info";

const consoleOptions: winston.transports.ConsoleTransportOptions = {
handleExceptions: false,
level: logLevel,
}
};

const fileOptions: winston.transports.FileTransportOptions = {
handleExceptions: false,
@ -17,7 +17,7 @@ const fileOptions: winston.transports.FileTransportOptions = {
maxsize: 16 * 1024,
maxFiles: 16,
tailable: true,
}
};

const logger = winston.createLogger({
format: winston.format.combine(
@ -30,4 +30,4 @@ const logger = winston.createLogger({
],
});

export default logger
export default logger;

@ -1,4 +1,4 @@
import { app, BrowserWindow, dialog, ipcMain, IpcMainEvent, Menu, MenuItem, MenuItemConstructorOptions, webContents, shell } from "electron"
import { app, BrowserWindow, dialog, ipcMain, IpcMainEvent, Menu, MenuItem, MenuItemConstructorOptions, webContents, shell } from "electron";
import { autorun } from "mobx";
import { WindowManager } from "./window-manager";
import { appName, isMac, isWindows, isTestEnv } from "../common/vars";
@ -25,14 +25,14 @@ export function showAbout(browserWindow: BrowserWindow) {
`Electron: ${process.versions.electron}`,
`Chrome: ${process.versions.chrome}`,
`Copyright 2020 Mirantis, Inc.`,
]
];
dialog.showMessageBoxSync(browserWindow, {
title: `${isWindows ? " ".repeat(2) : ""}${appName}`,
type: "info",
buttons: ["Close"],
message: `Lens`,
detail: appInfo.join("\r\n")
})
});
}

export function buildMenu(windowManager: WindowManager) {
@ -44,7 +44,7 @@ export function buildMenu(windowManager: WindowManager) {
function activeClusterOnly(menuItems: MenuItemConstructorOptions[]) {
if (!windowManager.activeClusterId) {
menuItems.forEach(item => {
item.enabled = false
item.enabled = false;
});
}
return menuItems;
@ -61,7 +61,7 @@ export function buildMenu(windowManager: WindowManager) {
{
label: "About Lens",
click(menuItem: MenuItem, browserWindow: BrowserWindow) {
showAbout(browserWindow)
showAbout(browserWindow);
}
},
{ type: 'separator' },
@ -69,14 +69,14 @@ export function buildMenu(windowManager: WindowManager) {
label: 'Preferences',
accelerator: 'CmdOrCtrl+,',
click() {
navigate(preferencesURL())
navigate(preferencesURL());
}
},
{
label: 'Extensions',
accelerator: 'CmdOrCtrl+Shift+E',
click() {
navigate(extensionsURL())
navigate(extensionsURL());
}
},
{ type: 'separator' },
@ -90,7 +90,7 @@ export function buildMenu(windowManager: WindowManager) {
label: 'Quit',
accelerator: 'Cmd+Q',
click() {
exitApp()
exitApp();
}
}
]
@ -103,7 +103,7 @@ export function buildMenu(windowManager: WindowManager) {
label: 'Add Cluster',
accelerator: 'CmdOrCtrl+Shift+A',
click() {
navigate(addClusterURL())
navigate(addClusterURL());
}
},
...activeClusterOnly([
@ -115,7 +115,7 @@ export function buildMenu(windowManager: WindowManager) {
params: {
clusterId: windowManager.activeClusterId
}
}))
}));
}
}
]),
@ -125,14 +125,14 @@ export function buildMenu(windowManager: WindowManager) {
label: 'Preferences',
accelerator: 'Ctrl+,',
click() {
navigate(preferencesURL())
navigate(preferencesURL());
}
},
{
label: 'Extensions',
accelerator: 'Ctrl+Shift+E',
click() {
navigate(extensionsURL())
navigate(extensionsURL());
}
}
]),
@ -147,7 +147,7 @@ export function buildMenu(windowManager: WindowManager) {
label: 'Exit',
accelerator: 'Alt+F4',
click() {
exitApp()
exitApp();
}
}
])
@ -183,7 +183,7 @@ export function buildMenu(windowManager: WindowManager) {
label: 'Forward',
accelerator: 'CmdOrCtrl+]',
click() {
webContents.getFocusedWebContents()?.goForward()
webContents.getFocusedWebContents()?.goForward();
}
},
{
@ -209,7 +209,7 @@ export function buildMenu(windowManager: WindowManager) {
{
label: "What's new?",
click() {
navigate(whatsNewURL())
navigate(whatsNewURL());
},
},
{
@ -222,7 +222,7 @@ export function buildMenu(windowManager: WindowManager) {
{
label: "About Lens",
click(menuItem: MenuItem, browserWindow: BrowserWindow) {
showAbout(browserWindow)
showAbout(browserWindow);
}
}
])
@ -236,7 +236,7 @@ export function buildMenu(windowManager: WindowManager) {
edit: editMenu,
view: viewMenu,
help: helpMenu,
}
};

// Modify menu from extensions-api
menuRegistry.getItems().forEach(({ parentId, ...menuItem }) => {
@ -244,12 +244,12 @@ export function buildMenu(windowManager: WindowManager) {
const topMenu = appMenu[parentId as MenuTopId].submenu as MenuItemConstructorOptions[];
topMenu.push(menuItem);
} catch (err) {
logger.error(`[MENU]: can't register menu item, parentId=${parentId}`, { menuItem })
logger.error(`[MENU]: can't register menu item, parentId=${parentId}`, { menuItem });
}
})
});

if (!isMac) {
delete appMenu.mac
delete appMenu.mac;
}

const menu = Menu.buildFromTemplate(Object.values(appMenu));
@ -259,9 +259,9 @@ export function buildMenu(windowManager: WindowManager) {
// this is a workaround for the test environment (spectron) not being able to directly access
// the application menus (https://github.com/electron-userland/spectron/issues/21)
ipcMain.on('test-menu-item-click', (event: IpcMainEvent, ...names: string[]) => {
let menu: Menu = Menu.getApplicationMenu()
let menu: Menu = Menu.getApplicationMenu();
const parentLabels: string[] = [];
let menuItem: MenuItem
let menuItem: MenuItem;

for (const name of names) {
parentLabels.push(name);
@ -272,7 +272,7 @@ export function buildMenu(windowManager: WindowManager) {
menu = menuItem.submenu;
}

const menuPath: string = parentLabels.join(" -> ")
const menuPath: string = parentLabels.join(" -> ");
if (!menuItem) {
logger.info(`[MENU:test-menu-item-click] Cannot find menu item ${menuPath}`);
return;

@ -1,12 +1,12 @@
import * as WebSocket from "ws"
import * as pty from "node-pty"
import * as WebSocket from "ws";
import * as pty from "node-pty";
import { ShellSession } from "./shell-session";
import { v4 as uuid } from "uuid"
import * as k8s from "@kubernetes/client-node"
import { KubeConfig } from "@kubernetes/client-node"
import { Cluster } from "./cluster"
import { v4 as uuid } from "uuid";
import * as k8s from "@kubernetes/client-node";
import { KubeConfig } from "@kubernetes/client-node";
import { Cluster } from "./cluster";
import logger from "./logger";
import { appEventBus } from "../common/event-bus"
import { appEventBus } from "../common/event-bus";

export class NodeShellSession extends ShellSession {
protected nodeName: string;
@ -14,23 +14,23 @@ export class NodeShellSession extends ShellSession {
protected kc: KubeConfig

constructor(socket: WebSocket, cluster: Cluster, nodeName: string) {
super(socket, cluster)
this.nodeName = nodeName
this.podId = `node-shell-${uuid()}`
this.kc = cluster.getProxyKubeconfig()
super(socket, cluster);
this.nodeName = nodeName;
this.podId = `node-shell-${uuid()}`;
this.kc = cluster.getProxyKubeconfig();
}

public async open() {
const shell = await this.kubectl.getPath()
let args = []
const shell = await this.kubectl.getPath();
let args = [];
if (this.createNodeShellPod(this.podId, this.nodeName)) {
await this.waitForRunningPod(this.podId).catch((error) => {
this.exit(1001)
})
this.exit(1001);
});
}
args = ["exec", "-i", "-t", "-n", "kube-system", this.podId, "--", "sh", "-c", "((clear && bash) || (clear && ash) || (clear && sh))"]
args = ["exec", "-i", "-t", "-n", "kube-system", this.podId, "--", "sh", "-c", "((clear && bash) || (clear && ash) || (clear && sh))"];

const shellEnv = await this.getCachedShellEnv()
const shellEnv = await this.getCachedShellEnv();
this.shellProcess = pty.spawn(shell, args, {
cols: 80,
cwd: this.cwd() || shellEnv["HOME"],
@ -39,19 +39,19 @@ export class NodeShellSession extends ShellSession {
rows: 30,
});
this.running = true;
this.pipeStdout()
this.pipeStdin()
this.closeWebsocketOnProcessExit()
this.exitProcessOnWebsocketClose()
this.pipeStdout();
this.pipeStdin();
this.closeWebsocketOnProcessExit();
this.exitProcessOnWebsocketClose();

appEventBus.emit({name: "node-shell", action: "open"})
appEventBus.emit({name: "node-shell", action: "open"});
}

protected exit(code = 1000) {
if (this.podId) {
this.deleteNodeShellPod()
this.deleteNodeShellPod();
}
super.exit(code)
super.exit(code);
}

protected async createNodeShellPod(podId: string, nodeName: string) {
@ -86,19 +86,19 @@ export class NodeShellSession extends ShellSession {
}
} as k8s.V1Pod;
await k8sApi.createNamespacedPod("kube-system", pod).catch((error) => {
logger.error(error)
return false
})
return true
logger.error(error);
return false;
});
return true;
}

protected getKubeConfig() {
if (this.kc) {
return this.kc
return this.kc;
}
this.kc = new k8s.KubeConfig();
this.kc.loadFromFile(this.kubeconfigPath)
return this.kc
this.kc.loadFromFile(this.kubeconfigPath);
return this.kc;
}

protected waitForRunningPod(podId: string) {
@ -110,36 +110,36 @@ export class NodeShellSession extends ShellSession {
// callback is called for each received object.
(_type, obj) => {
if (obj.metadata.name == podId && obj.status.phase === "Running") {
resolve(true)
resolve(true);
}
},
// done callback is called if the watch terminates normally
(err) => {
logger.error(err)
reject(false)
logger.error(err);
reject(false);
}
);
setTimeout(() => {
req.abort();
reject(false);
}, 120 * 1000);
})
});
}

protected deleteNodeShellPod() {
const kc = this.getKubeConfig();
const k8sApi = kc.makeApiClient(k8s.CoreV1Api);
k8sApi.deleteNamespacedPod(this.podId, "kube-system")
k8sApi.deleteNamespacedPod(this.podId, "kube-system");
}
}

export async function openShell(socket: WebSocket, cluster: Cluster, nodeName?: string): Promise<ShellSession> {
let shell: ShellSession;
if (nodeName) {
shell = new NodeShellSession(socket, cluster, nodeName)
shell = new NodeShellSession(socket, cluster, nodeName);
} else {
shell = new ShellSession(socket, cluster);
}
shell.open()
shell.open();
return shell;
}

@ -1,15 +1,15 @@
import net, { AddressInfo } from "net"
import logger from "./logger"
import net, { AddressInfo } from "net";
import logger from "./logger";

// todo: check https://github.com/http-party/node-portfinder ?

export async function getFreePort(): Promise<number> {
logger.debug("Lookup new free port..");
return new Promise((resolve, reject) => {
const server = net.createServer()
server.unref()
const server = net.createServer();
server.unref();
server.on("listening", () => {
const port = (server.address() as AddressInfo).port
const port = (server.address() as AddressInfo).port;
server.close(() => resolve(port));
logger.debug(`New port found: ${port}`);
});
@ -17,6 +17,6 @@ export async function getFreePort(): Promise<number> {
logger.error(`Can't resolve new port: "${error}"`);
reject(error);
});
server.listen({ host: "127.0.0.1", port: 0 })
})
server.listen({ host: "127.0.0.1", port: 0 });
});
}
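getFreePort() above relies on the OS assigning an ephemeral port when a server listens on port 0; the assigned port is read back from server.address() and the throwaway server is closed before the promise resolves. Typical usage (illustrative only):

import { getFreePort } from "./port";

async function startLocalProxy() {
  // The OS hands out a free ephemeral port because the lookup binds to port 0.
  const port = await getFreePort();
  console.log(`starting proxy on 127.0.0.1:${port}`);
}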

@ -1,5 +1,5 @@
import { EventEmitter } from 'events'
import { getFreePort } from "./port"
import { EventEmitter } from 'events';
import { getFreePort } from "./port";

let newPort = 0;

@ -8,24 +8,24 @@ jest.mock("net", () => {
createServer() {
return new class MockServer extends EventEmitter {
listen = jest.fn(() => {
this.emit('listening')
return this
this.emit('listening');
return this;
})
address = () => {
newPort = Math.round(Math.random() * 10000)
newPort = Math.round(Math.random() * 10000);
return {
port: newPort
}
};
}
unref = jest.fn()
close = jest.fn(cb => cb())
}
};
},
}
};
});

describe("getFreePort", () => {
it("finds the next free port", async () => {
return expect(getFreePort()).resolves.toEqual(newPort);
})
})
});
});

@ -1,7 +1,7 @@
import { PrometheusLens } from "./lens"
import { CoreV1Api } from "@kubernetes/client-node"
import { PrometheusLens } from "./lens";
import { CoreV1Api } from "@kubernetes/client-node";
import { PrometheusService } from "./provider-registry";
import logger from "../logger"
import logger from "../logger";

export class PrometheusHelm extends PrometheusLens {
id = "helm"
@ -9,21 +9,21 @@ export class PrometheusHelm extends PrometheusLens {
rateAccuracy = "5m"

public async getPrometheusService(client: CoreV1Api): Promise<PrometheusService> {
const labelSelector = "app=prometheus,component=server,heritage=Helm"
const labelSelector = "app=prometheus,component=server,heritage=Helm";
try {
const serviceList = await client.listServiceForAllNamespaces(false, "", null, labelSelector)
const service = serviceList.body.items[0]
if (!service) return
const serviceList = await client.listServiceForAllNamespaces(false, "", null, labelSelector);
const service = serviceList.body.items[0];
if (!service) return;

return {
id: this.id,
namespace: service.metadata.namespace,
service: service.metadata.name,
port: service.spec.ports[0].port
}
};
} catch(error) {
logger.warn(`PrometheusHelm: failed to list services: ${error.toString()}`)
return
logger.warn(`PrometheusHelm: failed to list services: ${error.toString()}`);
return;
}
}
}

@ -1,6 +1,6 @@
import { PrometheusProvider, PrometheusQueryOpts, PrometheusQuery, PrometheusService } from "./provider-registry";
import { CoreV1Api } from "@kubernetes/client-node";
import logger from "../logger"
import logger from "../logger";

export class PrometheusLens implements PrometheusProvider {
id = "lens"
@ -9,16 +9,16 @@ export class PrometheusLens implements PrometheusProvider {

public async getPrometheusService(client: CoreV1Api): Promise<PrometheusService> {
try {
const resp = await client.readNamespacedService("prometheus", "lens-metrics")
const service = resp.body
const resp = await client.readNamespacedService("prometheus", "lens-metrics");
const service = resp.body;
return {
id: this.id,
namespace: service.metadata.namespace,
service: service.metadata.name,
port: service.spec.ports[0].port
}
};
} catch(error) {
logger.warn(`PrometheusLens: failed to list services: ${error.response.body.message}`)
logger.warn(`PrometheusLens: failed to list services: ${error.response.body.message}`);
}
}

@ -42,7 +42,7 @@ export class PrometheusLens implements PrometheusProvider {
podCapacity: `sum(kube_node_status_capacity{node=~"${opts.nodes}", resource="pods"}) by (component)`,
fsSize: `sum(node_filesystem_size_bytes{kubernetes_node=~"${opts.nodes}", mountpoint="/"}) by (kubernetes_node)`,
fsUsage: `sum(node_filesystem_size_bytes{kubernetes_node=~"${opts.nodes}", mountpoint="/"} - node_filesystem_avail_bytes{kubernetes_node=~"${opts.nodes}", mountpoint="/"}) by (kubernetes_node)`
}
};
case 'nodes':
return {
memoryUsage: `sum (node_memory_MemTotal_bytes - (node_memory_MemFree_bytes + node_memory_Buffers_bytes + node_memory_Cached_bytes)) by (kubernetes_node)`,
@ -51,7 +51,7 @@ export class PrometheusLens implements PrometheusProvider {
cpuCapacity: `sum(kube_node_status_allocatable{resource="cpu"}) by (node)`,
fsSize: `sum(node_filesystem_size_bytes{mountpoint="/"}) by (kubernetes_node)`,
fsUsage: `sum(node_filesystem_size_bytes{mountpoint="/"} - node_filesystem_avail_bytes{mountpoint="/"}) by (kubernetes_node)`
}
};
case 'pods':
return {
cpuUsage: `sum(rate(container_cpu_usage_seconds_total{container!="POD",container!="",pod=~"${opts.pods}",namespace="${opts.namespace}"}[${this.rateAccuracy}])) by (${opts.selector})`,
@ -63,21 +63,21 @@ export class PrometheusLens implements PrometheusProvider {
fsUsage: `sum(container_fs_usage_bytes{container!="POD",container!="",pod=~"${opts.pods}",namespace="${opts.namespace}"}) by (${opts.selector})`,
networkReceive: `sum(rate(container_network_receive_bytes_total{pod=~"${opts.pods}",namespace="${opts.namespace}"}[${this.rateAccuracy}])) by (${opts.selector})`,
networkTransmit: `sum(rate(container_network_transmit_bytes_total{pod=~"${opts.pods}",namespace="${opts.namespace}"}[${this.rateAccuracy}])) by (${opts.selector})`
}
};
case 'pvc':
return {
diskUsage: `sum(kubelet_volume_stats_used_bytes{persistentvolumeclaim="${opts.pvc}"}) by (persistentvolumeclaim, namespace)`,
diskCapacity: `sum(kubelet_volume_stats_capacity_bytes{persistentvolumeclaim="${opts.pvc}"}) by (persistentvolumeclaim, namespace)`
}
};
case 'ingress':
const bytesSent = (ingress: string, statuses: string) =>
`sum(rate(nginx_ingress_controller_bytes_sent_sum{ingress="${ingress}", status=~"${statuses}"}[${this.rateAccuracy}])) by (ingress)`
`sum(rate(nginx_ingress_controller_bytes_sent_sum{ingress="${ingress}", status=~"${statuses}"}[${this.rateAccuracy}])) by (ingress)`;
return {
bytesSentSuccess: bytesSent(opts.igress, "^2\\\\d*"),
bytesSentFailure: bytesSent(opts.ingres, "^5\\\\d*"),
requestDurationSeconds: `sum(rate(nginx_ingress_controller_request_duration_seconds_sum{ingress="${opts.ingress}"}[${this.rateAccuracy}])) by (ingress)`,
responseDurationSeconds: `sum(rate(nginx_ingress_controller_response_duration_seconds_sum{ingress="${opts.ingress}"}[${this.rateAccuracy}])) by (ingress)`
}
};
}
}
}

@ -9,24 +9,24 @@ export class PrometheusOperator implements PrometheusProvider {

public async getPrometheusService(client: CoreV1Api): Promise<PrometheusService> {
try {
let service: V1Service
let service: V1Service;
for (const labelSelector of ["operated-prometheus=true", "self-monitor=true"]) {
if (!service) {
const serviceList = await client.listServiceForAllNamespaces(null, null, null, labelSelector)
service = serviceList.body.items[0]
const serviceList = await client.listServiceForAllNamespaces(null, null, null, labelSelector);
service = serviceList.body.items[0];
}
}
if (!service) return
if (!service) return;

return {
id: this.id,
namespace: service.metadata.namespace,
service: service.metadata.name,
port: service.spec.ports[0].port
}
};
} catch(error) {
logger.warn(`PrometheusOperator: failed to list services: ${error.toString()}`)
return
logger.warn(`PrometheusOperator: failed to list services: ${error.toString()}`);
return;
}
}

@ -50,7 +50,7 @@ export class PrometheusOperator implements PrometheusProvider {
podCapacity: `sum(kube_node_status_capacity{node=~"${opts.nodes}", resource="pods"})`,
fsSize: `sum(node_filesystem_size_bytes{mountpoint="/"} * on (pod,namespace) group_left(node) kube_pod_info{node=~"${opts.nodes}"})`,
fsUsage: `sum(node_filesystem_size_bytes{mountpoint="/"} * on (pod,namespace) group_left(node) kube_pod_info{node=~"${opts.nodes}"} - node_filesystem_avail_bytes{mountpoint="/"} * on (pod,namespace) group_left(node) kube_pod_info{node=~"${opts.nodes}"})`
}
};
case 'nodes':
return {
memoryUsage: `sum((node_memory_MemTotal_bytes - (node_memory_MemFree_bytes + node_memory_Buffers_bytes + node_memory_Cached_bytes)) * on (pod,namespace) group_left(node) kube_pod_info) by (node)`,
@ -59,7 +59,7 @@ export class PrometheusOperator implements PrometheusProvider {
cpuCapacity: `sum(kube_node_status_allocatable{resource="cpu"}) by (node)`,
fsSize: `sum(node_filesystem_size_bytes{mountpoint="/"} * on (pod,namespace) group_left(node) kube_pod_info) by (node)`,
fsUsage: `sum((node_filesystem_size_bytes{mountpoint="/"} - node_filesystem_avail_bytes{mountpoint="/"}) * on (pod,namespace) group_left(node) kube_pod_info) by (node)`
}
};
case 'pods':
return {
cpuUsage: `sum(rate(container_cpu_usage_seconds_total{container!="POD",container!="",image!="",pod=~"${opts.pods}",namespace="${opts.namespace}"}[${this.rateAccuracy}])) by (${opts.selector})`,
@ -71,12 +71,12 @@ export class PrometheusOperator implements PrometheusProvider {
fsUsage: `sum(container_fs_usage_bytes{container!="POD",container!="",pod=~"${opts.pods}",namespace="${opts.namespace}"}) by (${opts.selector})`,
networkReceive: `sum(rate(container_network_receive_bytes_total{pod=~"${opts.pods}",namespace="${opts.namespace}"}[${this.rateAccuracy}])) by (${opts.selector})`,
networkTransmit: `sum(rate(container_network_transmit_bytes_total{pod=~"${opts.pods}",namespace="${opts.namespace}"}[${this.rateAccuracy}])) by (${opts.selector})`
}
};
case 'pvc':
return {
diskUsage: `sum(kubelet_volume_stats_used_bytes{persistentvolumeclaim="${opts.pvc}"}) by (persistentvolumeclaim, namespace)`,
diskCapacity: `sum(kubelet_volume_stats_capacity_bytes{persistentvolumeclaim="${opts.pvc}"}) by (persistentvolumeclaim, namespace)`
}
};
case 'ingress':
const bytesSent = (ingress: string, statuses: string) =>
`sum(rate(nginx_ingress_controller_bytes_sent_sum{ingress="${ingress}", status=~"${statuses}"}[${this.rateAccuracy}])) by (ingress)`;
@ -85,7 +85,7 @@ export class PrometheusOperator implements PrometheusProvider {
bytesSentFailure: bytesSent(opts.ingres, "^5\\\\d*"),
requestDurationSeconds: `sum(rate(nginx_ingress_controller_request_duration_seconds_sum{ingress="${opts.ingress}"}[${this.rateAccuracy}])) by (ingress)`,
responseDurationSeconds: `sum(rate(nginx_ingress_controller_response_duration_seconds_sum{ingress="${opts.ingress}"}[${this.rateAccuracy}])) by (ingress)`
}
};
}
}
}