Merge pull request #234 from RedisInsight/release/1.2.0
Release/1.2.0 to latest
Showing 163 changed files with 4,525 additions and 1,412 deletions.
@@ -0,0 +1,14 @@
name: Download backend

inputs:
  arch:
    description: Architecture arm64 or x64
    required: false
    default: 'x64'

runs:
  using: 'composite'
  steps:
    - name: Download backend
      shell: bash
      run: yarn download:backend ${{ inputs.arch }}
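For local debugging outside the workflow, the same download can presumably be triggered directly through the yarn script this action wraps (a minimal sketch, assuming a download:backend script is defined in the root package.json, as the action's run line implies):

# Default architecture used by the action
yarn download:backend x64
# Or explicitly target arm64
yarn download:backend arm64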
@@ -0,0 +1,15 @@
name: Install all libraries action
description: Install all libraries and dependencies

runs:
  using: 'composite'
  steps:
    # OS libraries
    - name: Setup Node
      uses: actions/setup-node@v4
      with:
        node-version: '20.18.0'

    - name: Install dependencies for root package.json
      shell: bash
      run: yarn install --frozen-lockfile --network-timeout 1000000
@@ -0,0 +1,4 @@
#!/bin/bash
set -e

find ./release -type f -name '*.tar.gz' -execdir sh -c 'sha256sum "$1" > "$1.sha256"' _ {} \;
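Each archive ends up with a sibling <name>.tar.gz.sha256 file. Those checksums can later be verified with sha256sum's check mode; a minimal sketch, assuming the archives sit directly under ./release:

# Verify every generated checksum in place
cd release
sha256sum -c *.sha256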
@@ -0,0 +1,250 @@
const fs = require('fs');
const { join } = require('path');
const { last, set } = require('lodash');
const { google } = require('googleapis');
const { exec } = require('child_process');
const csvParser = require('csv-parser');
const { stringify } = require('csv-stringify');

const licenseFolderName = 'licenses';
const spreadsheetId = process.env.SPREADSHEET_ID;
const summaryFilePath = `./${licenseFolderName}/summary.csv`;
const allData = [];
let csvFiles = [];

// Main function
async function main() {
  const folderPath = './';
  const packageJsons = findPackageJsonFiles(folderPath); // Find all package.json files in the given folder

  console.log('All package.json files were found:', packageJsons);

  // Create the folder if it doesn't exist
  if (!fs.existsSync(licenseFolderName)) {
    fs.mkdirSync(licenseFolderName);
  }

  try {
    await Promise.all(packageJsons.map(runLicenseCheck));
    console.log('All csv files were generated');
    await generateSummary();
    await sendLicensesToGoogleSheet();
  } catch (error) {
    console.error('An error occurred:', error);
    process.exit(1);
  }
}

main();

// Function to find all package.json files in a given folder
function findPackageJsonFiles(folderPath) {
  const packageJsonPaths = [];
  const packageJsonName = 'package.json';
  const excludeFolders = ['dist', 'node_modules', 'test-extensions'];

  // Recursive function to search for package.json files
  function searchForPackageJson(currentPath) {
    const files = fs.readdirSync(currentPath);

    for (const file of files) {
      const filePath = join(currentPath, file);
      const stats = fs.statSync(filePath);

      if (stats.isDirectory() && !excludeFolders.includes(file)) {
        searchForPackageJson(filePath);
      } else if (file === packageJsonName) {
        packageJsonPaths.push(`./${filePath.slice(0, -packageJsonName.length - 1)}`);
      }
    }
  }

  searchForPackageJson(folderPath);
  return packageJsonPaths;
}

// Function to run license check for a given package.json file
async function runLicenseCheck(path) {
  const name = last(path.split('/')) || 'vscode';

  const COMMANDS = [
    `license-checker --start ${path} --csv --out ./${licenseFolderName}/${name}_prod.csv --production`,
    `license-checker --start ${path} --csv --out ./${licenseFolderName}/${name}_dev.csv --development`,
  ];

  return await Promise.all(COMMANDS.map((command) =>
    new Promise((resolve, reject) => {
      exec(command, (error, stdout, stderr) => {
        if (error) {
          console.error(`Failed command: ${command}, error:`, stderr);
          return reject(error);
        }
        resolve();
      });
    })
  ));
}

async function sendLicensesToGoogleSheet() {
  try {
    const serviceAccountKey = JSON.parse(fs.readFileSync('./gasKey.json', 'utf-8'));

    // Set up JWT client
    const jwtClient = new google.auth.JWT(
      serviceAccountKey.client_email,
      null,
      serviceAccountKey.private_key,
      ['https://www.googleapis.com/auth/spreadsheets'],
    );

    const sheets = google.sheets('v4');

    // Read all .csv files in the 'licenses' folder
    csvFiles.forEach((csvFile) => {
      // Extract sheet name from file name
      const sheetName = csvFile.replace('.csv', '').replaceAll('_', ' ');

      const data = [];
      fs.createReadStream(`./${licenseFolderName}/${csvFile}`)
        .pipe(csvParser({ headers: false }))
        .on('data', (row) => {
          data.push(Object.values(row));
        })
        .on('end', async () => {
          const resource = { values: data };

          try {
            const response = await sheets.spreadsheets.get({
              auth: jwtClient,
              spreadsheetId,
            });

            const sheet = response.data.sheets.find((sheet) => sheet.properties.title === sheetName);
            if (sheet) {
              // Clear the existing contents of the sheet
              await sheets.spreadsheets.values.clear({
                auth: jwtClient,
                spreadsheetId,
                range: `${sheetName}!A1:Z`, // Assuming Z is the last column
              });
            } else {
              // Create the sheet if it doesn't exist
              await sheets.spreadsheets.batchUpdate({
                auth: jwtClient,
                spreadsheetId,
                resource: set({}, 'requests[0].addSheet.properties.title', sheetName),
              });
            }
          } catch (error) {
            console.error(`Error checking/creating sheet for ${sheetName}:`, error);
          }

          try {
            await sheets.spreadsheets.values.batchUpdate({
              auth: jwtClient,
              spreadsheetId,
              resource: {
                valueInputOption: 'RAW',
                data: [
                  {
                    range: `${sheetName}!A1`, // Use the sheet name as the range and start from A1
                    majorDimension: 'ROWS',
                    values: data,
                  },
                ],
              },
            });

            console.log(`CSV data has been inserted into ${sheetName} sheet.`);
          } catch (err) {
            console.error(`Error inserting data for ${sheetName}:`, err);
          }
        });
    });
  } catch (error) {
    console.error('Error loading service account key:', error);
  }
}

// Function to read and process each CSV file
const processCSVFile = (file) => {
  return new Promise((resolve, reject) => {
    const parser = csvParser({ columns: true, trim: true });
    const input = fs.createReadStream(`./${licenseFolderName}/${file}`);

    parser.on('data', (record) => {
      allData.push(record);
    });

    parser.on('end', () => {
      resolve();
    });

    parser.on('error', (err) => {
      reject(err);
    });

    input.pipe(parser);
  });
};

// Process and aggregate license data
const processLicenseData = () => {
  const licenseCountMap = {};
  for (const record of allData) {
    const license = record.license;
    licenseCountMap[license] = (licenseCountMap[license] || 0) + 1;
  }
  return licenseCountMap;
};

// Create summary CSV data
const createSummaryData = (licenseCountMap) => {
  const summaryData = [['License', 'Count']];
  for (const license in licenseCountMap) {
    summaryData.push([license, licenseCountMap[license]]);
  }
  return summaryData;
};

// Write summary CSV file
const writeSummaryCSV = async (summaryData) => {
  try {
    const summaryCsvString = await stringifyPromise(summaryData);
    fs.writeFileSync(summaryFilePath, summaryCsvString);
    csvFiles.push(last(summaryFilePath.split('/')));
    console.log(`Summary CSV saved as ${summaryFilePath}`);
  } catch (err) {
    console.error(`Error: ${err}`);
  }
};

// Stringify as a promise
const stringifyPromise = (data) => {
  return new Promise((resolve, reject) => {
    stringify(data, (err, csvString) => {
      if (err) {
        reject(err);
      } else {
        resolve(csvString);
      }
    });
  });
};

async function generateSummary() {
  csvFiles = fs.readdirSync(licenseFolderName).filter(file => file.endsWith('.csv')).sort();

  for (const file of csvFiles) {
    try {
      await processCSVFile(file);
    } catch (err) {
      console.error(`Error processing ${file}: ${err}`);
    }
  }

  const licenseCountMap = processLicenseData();
  const summaryData = createSummaryData(licenseCountMap);

  await writeSummaryCSV(summaryData);
}
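Running this script locally needs a few things the diff does not show: the script's actual path and name, license-checker available on PATH, the googleapis, csv-parser, csv-stringify and lodash packages installed, a ./gasKey.json service-account key, and a SPREADSHEET_ID environment variable. A hedged invocation sketch (the file name below is hypothetical):

# Hypothetical path and file name; adjust to wherever this script lives in the repo
SPREADSHEET_ID=<google-sheet-id> node .github/generate-licenses.js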
@@ -0,0 +1,57 @@
const fs = require('fs');

let parallelNodeInfo = '';
const totalNodes = 4;
if (totalNodes > 1) {
  parallelNodeInfo = ` (node: ${parseInt(process.env.NODE_INDEX, 10) + 1}/${totalNodes})`;
}

const file = fs.readdirSync('tests/e2e/mochawesome-report').find(file => file.endsWith('-setup-report.json'));
const appBuildType = process.env.APP_BUILD_TYPE || 'VSCode (Linux)';
const results = {
  message: {
    text: `*E2ETest - ${appBuildType}${parallelNodeInfo}* (Branch: *${process.env.GITHUB_REF_NAME}*)` +
      `\n<https://github.com/RedisInsight/RedisInsight/actions/runs/${process.env.GITHUB_RUN_ID}|View on Github Actions>`,
    attachments: [],
  },
};

// Read the report from the directory it was listed from
const result = JSON.parse(fs.readFileSync(`tests/e2e/mochawesome-report/${file}`, 'utf-8'));
const testRunResult = {
  color: '#36a64f',
  title: `Started at: *${result.stats.start}*`,
  text: `Executed ${result.stats.tests} tests in ${(new Date(result.stats.end) - new Date(result.stats.start)) / 1000}s`,
  fields: [
    {
      title: 'Passed',
      value: result.stats.passes,
      short: true,
    },
    {
      title: 'Skipped',
      value: result.stats.skipped,
      short: true,
    },
  ],
};
const failed = result.stats.failures;
if (failed) {
  results.passed = false;
  testRunResult.color = '#cc0000';
  testRunResult.fields.push({
    title: 'Failed',
    value: failed,
    short: true,
  });
}

results.message.attachments.push(testRunResult);

if (results.passed === false) {
  results.message.text = '<!here> ' + results.message.text;
}

fs.writeFileSync('e2e.report.json', JSON.stringify({
  channel: process.env.SLACK_TEST_REPORT_CHANNEL,
  ...results.message,
}));
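This script only writes e2e.report.json; posting it to Slack is left to a later step that is not shown in this excerpt. Since the payload already matches the chat.postMessage shape (channel, text, attachments), a minimal sketch of such a delivery step could look like the following, assuming a bot token in a SLACK_TEST_REPORT_TOKEN variable (a hypothetical name):

# Hypothetical delivery step: send the generated payload to Slack
curl -s -X POST https://slack.com/api/chat.postMessage \
  -H "Authorization: Bearer $SLACK_TEST_REPORT_TOKEN" \
  -H "Content-type: application/json; charset=utf-8" \
  --data @e2e.report.json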
@@ -0,0 +1,15 @@
#!/bin/bash
set -e

# Create folder before tests run to prevent permissions issues
mkdir -p tests/e2e/remote

# Run RTE (Redis Test Environment)
docker compose -f tests/e2e/rte.docker-compose.yml build
docker compose -f tests/e2e/rte.docker-compose.yml up --force-recreate -d -V
./tests/e2e/wait-for-redis.sh localhost 12000

# Run tests
RI_SOCKETS_CORS=true \
xvfb-run --auto-servernum \
yarn --cwd tests/e2e dotenv -e .ci.env yarn --cwd tests/e2e test:ci
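The ./tests/e2e/wait-for-redis.sh helper is referenced above but not included in this changeset. A minimal sketch of what such a helper typically looks like (hypothetical implementation, probing the port with bash's /dev/tcp):

#!/bin/bash
# Hypothetical wait-for-redis.sh: block until a TCP port accepts connections.
# Usage: ./wait-for-redis.sh <host> <port>
host="$1"
port="$2"
until (exec 3<>"/dev/tcp/${host}/${port}") 2>/dev/null; do
  echo "Waiting for Redis at ${host}:${port}..."
  sleep 1
done
echo "Redis is reachable at ${host}:${port}"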