Add new files and put static version back to 1.0

Chris Hunt
2024-08-13 10:31:16 +01:00
parent 4476528522
commit 8ddd8aabe0
6 changed files with 105 additions and 4 deletions

View File

@@ -0,0 +1,48 @@
const fs = require("fs");
const glob = require("glob");
const baseversion = '1.16'
const tag = ':develop'
const version = baseversion + '.x'
const tagversion = baseversion + '.0'
glob("../workspaces/**/workspace.json", async function (err, files) {
if (err) {
console.log(
"cannot read the folder, something goes wrong with glob",
err
);
}
for (const file of files) {
let filedata = fs.readFileSync(file);
let parsed = JSON.parse(filedata);
const current = parsed.compatibility[parsed.compatibility.length - 1]
const image = current.image.split(':')[0]
const exists = parsed.compatibility.findIndex(el => el.version === version)
let details = {
version,
image: image + tag,
uncompressed_size_mb: 0,
available_tags: [
'develop',
tagversion,
tagversion + '-rolling-weekly',
tagversion + '-rolling-daily'
]
}
if (exists === -1) {
parsed.compatibility.push(details)
fs.writeFileSync(file, JSON.stringify(parsed, null, 2));
}
}
});
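
For reference, the entry this script appends to each workspace's compatibility list looks like the sketch below; the image name is a placeholder, since the real value is derived from that workspace's most recent existing entry:

const exampleEntry = {
  version: '1.16.x',
  image: 'example/workspace-image:develop',  // hypothetical image name
  uncompressed_size_mb: 0,
  available_tags: ['develop', '1.16.0', '1.16.0-rolling-weekly', '1.16.0-rolling-daily']
}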

View File

@@ -0,0 +1,45 @@
const fs = require("fs");
const glob = require("glob");
const { execSync } = require('child_process');
glob("../workspaces/**/workspace.json", function (err, files) {
if (err) {
console.log(
"cannot read the folder, something goes wrong with glob",
err
);
}
let total = 0
for (const file of files) {
let filedata = fs.readFileSync(file);
let parsed = JSON.parse(filedata);
parsed.compatibility.forEach((element, index) => {
total++
if (element.uncompressed_size_mb === 0) {
execSync('docker image prune -a -f')
execSync('docker system prune --all --force --volumes')
let pull = execSync('docker pull ' + element.image)
// console.log(pull)
let inspect = execSync('docker inspect -f "{{ .Size }}" ' + element.image)
let size = Math.round(inspect / 1000000)
let remove = execSync('docker rmi ' + element.image)
console.log(remove)
parsed.compatibility[index].uncompressed_size_mb = size
console.log('Write file: ' + parsed.friendly_name + ' - ' + element.version + ': ' + size)
fs.writeFileSync(file, JSON.stringify(parsed, null, 2));
} else {
console.log(parsed.friendly_name + ' - ' + element.version + ': skipped')
}
})
}
console.log(total + ' entries processed')
});
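
As a quick sanity check of the conversion above: docker inspect's {{ .Size }} template reports bytes, so an image of 1234567890 bytes would be recorded as 1235 MB:

const bytes = 1234567890;                                  // sample value returned by docker inspect -f "{{ .Size }}"
const uncompressed_size_mb = Math.round(bytes / 1000000);  // 1235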

View File

@@ -40,6 +40,13 @@ glob("**/workspace.json", async function (err, files) {
    let parsed = JSON.parse(filedata);
    parsed.sha = hash.hash;
    console.log(parsed.name + ' added')
    parsed.compatibility.forEach((element, index) => {
      if ('available_tags' in element) {
        element.available_tags.forEach((el) => {
          channels.add(el)
        })
      }
    })
    workspaces.push(parsed);
    if (fs.existsSync(folder + "/" + parsed.image_src)) {
@@ -60,6 +67,7 @@ glob("**/workspace.json", async function (err, files) {
    contact_url: nextConfig.env.contactUrl || null,
    modified: Date.now(),
    workspaces: workspaces,
    channels: [...channels]
  };
  let data = JSON.stringify(json);
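
For illustration, the channels set (presumably declared elsewhere in this script, outside the hunks shown) collects every value found in available_tags, so with only the 1.16 entries added by the first script the new field would serialise to something like:

const channels = new Set(['develop', '1.16.0', '1.16.0-rolling-weekly', '1.16.0-rolling-daily']);
console.log([...channels]);  // hypothetical contents of the new channels field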