Skip to content

Commit

Permalink
Merge pull request #215 from RedisInsight/feature/RI-6233_Migrate_to_…
Browse files Browse the repository at this point in the history
…Github_actions

#RI-6233 - Migrate to GitHub actions
  • Loading branch information
egor-zalenski authored Oct 30, 2024
2 parents f9f0af4 + cd23d8d commit 8c63a82
Show file tree
Hide file tree
Showing 22 changed files with 932 additions and 6 deletions.
14 changes: 14 additions & 0 deletions .github/actions/download-backend/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# Composite action: downloads the prebuilt RedisInsight backend for the
# requested CPU architecture via the repo's `download:backend` yarn script.
name: Download backend

inputs:
  arch:
    description: Architecture arm64 or x64
    required: false
    default: 'x64'

runs:
  using: 'composite'
  steps:
    - name: Download backend
      shell: bash
      run: yarn download:backend ${{ inputs.arch }}
15 changes: 15 additions & 0 deletions .github/actions/install-all-build-libs/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# Composite action: sets up Node and installs the repository's yarn
# dependencies with a pinned lockfile.
name: Install all libraries action
description: Install all libraries and dependencies

runs:
  using: 'composite'
  steps:
    # OS libraries
    - name: Setup Node
      uses: actions/setup-node@v4
      with:
        node-version: '20.15'

    # Typo fix: the root manifest is package.json, not package.js.
    # Long network timeout guards against flaky registry access on CI.
    - name: Install dependencies for root package.json
      shell: bash
      run: yarn install --frozen-lockfile --network-timeout 1000000
4 changes: 4 additions & 0 deletions .github/build/sum_sha256.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
#!/bin/bash
# Generate a .sha256 checksum file alongside every release tarball.
set -e

# For each *.tar.gz under ./release, run sha256sum from the file's own
# directory (-execdir) so the checksum file records just the bare filename.
find ./release -type f -name '*.tar.gz' -execdir sh -c 'sha256sum "$1" > "$1.sha256"' _ {} \;
250 changes: 250 additions & 0 deletions .github/deps-licenses-report.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,250 @@
const fs = require('fs');
const { join } = require('path');
const { last, set } = require('lodash');
const { google } = require('googleapis');
const { exec } = require('child_process');
const csvParser = require('csv-parser');
const { stringify } = require('csv-stringify');

// Folder where the per-package license CSV reports are written.
const licenseFolderName = 'licenses';
// Target Google Sheet id; injected by CI via the environment.
const spreadsheetId = process.env.SPREADSHEET_ID;
const summaryFilePath = `./${licenseFolderName}/summary.csv`;
// Rows accumulated across every parsed CSV report.
const allData = [];
// Names of the generated CSV files (populated by generateSummary).
let csvFiles = [];

// Main function
// Main function: finds every package.json, generates per-package license
// CSV reports, aggregates them into a summary, and uploads the results
// to a Google Sheet. Exits non-zero on any failure so CI fails the job.
async function main() {
  const folderPath = './';
  // Find all package.json files in the given folder
  const packageJsons = findPackageJsonFiles(folderPath);

  console.log('All package.jsons was found:', packageJsons);

  // Create the output folder if it doesn't exist; `recursive: true` also
  // makes this a no-op instead of a race/crash if it already appeared.
  if (!fs.existsSync(licenseFolderName)) {
    fs.mkdirSync(licenseFolderName, { recursive: true });
  }

  try {
    // License checks for all packages can run in parallel.
    await Promise.all(packageJsons.map(runLicenseCheck));
    console.log('All csv files was generated');
    await generateSummary();
    await sendLicensesToGoogleSheet();
  } catch (error) {
    console.error('An error occurred:', error);
    process.exit(1);
  }
}

main();

// Function to find all package.json files in a given folder
// Recursively collect every directory under folderPath that contains a
// package.json, skipping build-output and dependency folders.
// Returned entries have the form "./<dir>" (the file name itself is stripped).
function findPackageJsonFiles(folderPath) {
  const packageJsonName = 'package.json';
  const excludeFolders = ['dist', 'node_modules', 'test-extensions'];
  const packageJsonPaths = [];

  // Depth-first walk; descends into every directory not in excludeFolders.
  const walk = (dir) => {
    for (const entry of fs.readdirSync(dir)) {
      const entryPath = join(dir, entry);
      if (fs.statSync(entryPath).isDirectory()) {
        if (!excludeFolders.includes(entry)) {
          walk(entryPath);
        }
      } else if (entry === packageJsonName) {
        // Drop the trailing "/package.json" and prefix with "./".
        packageJsonPaths.push(`./${entryPath.slice(0, -packageJsonName.length - 1)}`);
      }
    }
  };

  walk(folderPath);
  return packageJsonPaths;
}

// Function to run license check for a given package.json file
// Function to run license check for a given package.json file.
// Generates <name>_prod.csv and <name>_dev.csv in the licenses folder via
// the `license-checker` CLI; the two checks run in parallel.
// Resolves when both commands finish; rejects on the first failure.
async function runLicenseCheck(path) {
  // Last path segment names the package; the repo root ('.') maps to 'vscode'.
  const name = last(path.split('/')) || 'vscode';

  const COMMANDS = [
    `license-checker --start ${path} --csv --out ./${licenseFolderName}/${name}_prod.csv --production`,
    `license-checker --start ${path} --csv --out ./${licenseFolderName}/${name}_dev.csv --development`,
  ];

  return Promise.all(COMMANDS.map((command) =>
    new Promise((resolve, reject) => {
      exec(command, (error, stdout, stderr) => {
        if (error) {
          console.error(`Failed command: ${command}, error:`, stderr);
          reject(error);
          // Fix: return so we don't fall through and call resolve() after
          // rejecting (harmless at runtime, but misleading control flow).
          return;
        }
        resolve();
      });
    })
  ));
}

// Uploads every CSV in csvFiles to the Google Sheet identified by
// spreadsheetId, one sheet (tab) per file, authenticating with the
// service-account key expected at ./gasKey.json.
// NOTE(review): the per-file stream handlers run asynchronously and are not
// awaited, so this function can return before uploads finish — confirm the
// CI job tolerates that.
async function sendLicensesToGoogleSheet() {
  try {
    const serviceAccountKey = JSON.parse(fs.readFileSync('./gasKey.json', 'utf-8'));

    // Set up JWT client
    const jwtClient = new google.auth.JWT(
      serviceAccountKey.client_email,
      null,
      serviceAccountKey.private_key,
      ['https://www.googleapis.com/auth/spreadsheets']
    );

    const sheets = google.sheets('v4');

    // Read all .csv files in the 'licenses' folder
    csvFiles.forEach((csvFile) => {
      // Extract sheet name from file name, e.g. "api_prod.csv" -> "api prod"
      const sheetName = csvFile.replace('.csv', '').replaceAll('_', ' ');

      const data = [];
      // headers: false — keep the CSV's own header row as a data row so it
      // is written to the sheet too.
      fs.createReadStream(`./${licenseFolderName}/${csvFile}`)
        .pipe(csvParser({ headers: false }))
        .on('data', (row) => {
          data.push(Object.values(row));
        })
        .on('end', async () => {
          // NOTE(review): `resource` is unused below — the batchUpdate call
          // builds its own payload.
          const resource = { values: data };

          try {
            const response = await sheets.spreadsheets.get({
              auth: jwtClient,
              spreadsheetId,
            });

            const sheet = response.data.sheets.find((sheet) => sheet.properties.title === sheetName);
            if (sheet) {
              // Clear existing contents of the sheet (from A1) before re-upload
              await sheets.spreadsheets.values.clear({
                auth: jwtClient,
                spreadsheetId,
                range: `${sheetName}!A1:Z`, // Assuming Z is the last column
              });
            } else {
              // Create the sheet if it doesn't exist
              await sheets.spreadsheets.batchUpdate({
                auth: jwtClient,
                spreadsheetId,
                resource: set({}, 'requests[0].addSheet.properties.title', sheetName),
              });
            }
          } catch (error) {
            console.error(`Error checking/creating sheet for ${sheetName}:`, error);
          }

          try {
            await sheets.spreadsheets.values.batchUpdate({
              auth: jwtClient,
              spreadsheetId,
              resource: {
                valueInputOption: 'RAW',
                data: [
                  {
                    range: `${sheetName}!A1`, // Write starting at the top-left cell
                    majorDimension: 'ROWS',
                    values: data,
                  },
                ],
              },
            });

            console.log(`CSV data has been inserted into ${sheetName} sheet.`);
          } catch (err) {
            console.error(`Error inserting data for ${sheetName}:`, err);
          }
        });
    });
  } catch (error) {
    console.error('Error loading service account key:', error);
  }
}

// Function to read and process each CSV file
// Function to read and process each CSV file: streams one report from the
// licenses folder and appends every parsed record to the shared allData array.
// NOTE(review): `columns`/`trim` look like csv-parse options — csv-parser's
// documented options are `headers`, `mapValues`, etc., so these are likely
// ignored and the first CSV row is treated as the header row by default.
// Confirm against the csv-parser version in use.
const processCSVFile = (file) => {
  return new Promise((resolve, reject) => {
    const parser = csvParser({ columns: true, trim: true });
    const input = fs.createReadStream(`./${licenseFolderName}/${file}`);

    parser.on('data', (record) => {
      allData.push(record);
    });

    parser.on('end', () => {
      resolve();
    });

    parser.on('error', (err) => {
      reject(err);
    });

    input.pipe(parser);
  });
};

// Process and aggregate license data
// Tally how many parsed dependency records fall under each license name.
// Reads the module-level allData accumulator; returns { license: count }.
const processLicenseData = () => {
  const licenseCountMap = {};
  allData.forEach(({ license }) => {
    licenseCountMap[license] = (licenseCountMap[license] ?? 0) + 1;
  });
  return licenseCountMap;
};

// Create summary CSV data
// Build the rows for summary.csv: a header row followed by one
// [license, count] row per distinct license.
const createSummaryData = (licenseCountMap) => [
  ['License', 'Count'],
  ...Object.entries(licenseCountMap),
];

// Write summary CSV file
// Serialize the summary rows to CSV, write the file to summaryFilePath, and
// register its name in csvFiles so it is uploaded with the other reports.
// Failures are logged, not thrown (best-effort, same as before).
const writeSummaryCSV = async (summaryData) => {
  try {
    const csv = await stringifyPromise(summaryData);
    fs.writeFileSync(summaryFilePath, csv);
    csvFiles.push(last(summaryFilePath.split('/')));
    console.log(`Summary CSV saved as ${summaryFilePath}`);
  } catch (err) {
    console.error(`Error: ${err}`);
  }
};

// Stringify as a promise
// Promise wrapper around csv-stringify's callback API: resolves with the
// CSV string, rejects with the stringify error.
const stringifyPromise = (data) =>
  new Promise((resolve, reject) => {
    stringify(data, (err, csvString) => (err ? reject(err) : resolve(csvString)));
  });

// Aggregate every generated CSV report into summary.csv.
// Populates the module-level csvFiles list (sorted for a stable sheet
// order), parses each report sequentially into allData, then writes the
// per-license counts out. A file that fails to parse is logged and skipped.
async function generateSummary() {
  csvFiles = fs
    .readdirSync(licenseFolderName)
    .filter((file) => file.endsWith('.csv'))
    .sort();

  for (const file of csvFiles) {
    try {
      await processCSVFile(file);
    } catch (err) {
      console.error(`Error processing ${file}: ${err}`);
    }
  }

  await writeSummaryCSV(createSummaryData(processLicenseData()));
}
57 changes: 57 additions & 0 deletions .github/e2e/e2e-results.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
const fs = require('fs');

// Build a Slack-formatted summary of the e2e setup report and write it to
// e2e.report.json for the notification step.
const reportDir = 'tests/e2e/mochawesome-report';

// Label for parallel CI nodes, e.g. " (node: 2/4)".
let parallelNodeInfo = '';
const totalNodes = 4;
if (totalNodes > 1) {
  parallelNodeInfo = ` (node: ${parseInt(process.env.NODE_INDEX, 10) + 1}/${totalNodes})`;
}

const file = fs.readdirSync(reportDir).find((f) => f.endsWith('-setup-report.json'));
const appBuildType = process.env.APP_BUILD_TYPE || 'VSCode (Linux)';
const results = {
  message: {
    text: `*E2ETest - ${appBuildType}${parallelNodeInfo}* (Branch: *${process.env.GITHUB_REF_NAME}*)` +
      `\n<https://github.com/RedisInsight/RedisInsight/actions/runs/${process.env.GITHUB_RUN_ID}|View on Github Actions>`,
    attachments: [],
  },
};

// Fix: resolve the report file against reportDir — the original passed the
// bare filename to readFileSync, which reads relative to the process cwd.
const result = JSON.parse(fs.readFileSync(`${reportDir}/${file}`, 'utf-8'));
const testRunResult = {
  color: '#36a64f',
  // Fix: close the Slack bold marker around the start timestamp.
  title: `Started at: *${result.stats.start}*`,
  text: `Executed ${result.stats.tests} in ${(new Date(result.stats.end) - new Date(result.stats.start)) / 1000}s`,
  fields: [
    {
      title: 'Passed',
      value: result.stats.passes,
      short: true,
    },
    {
      title: 'Skipped',
      value: result.stats.skipped,
      short: true,
    },
  ],
};

// On failures: mark the run red, add a Failed field, and @here the channel.
const failed = result.stats.failures;
if (failed) {
  results.passed = false;
  testRunResult.color = '#cc0000';
  testRunResult.fields.push({
    title: 'Failed',
    value: failed,
    short: true,
  });
}

results.message.attachments.push(testRunResult);

if (results.passed === false) {
  results.message.text = '<!here> ' + results.message.text;
}

fs.writeFileSync('e2e.report.json', JSON.stringify({
  channel: process.env.SLACK_TEST_REPORT_CHANNEL,
  ...results.message,
}));
15 changes: 15 additions & 0 deletions .github/e2e/test.app.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
#!/bin/bash
# Run the e2e test suite against a dockerized Redis test environment.
set -e

# Create folder before tests run to prevent permissions issues
mkdir -p tests/e2e/remote

# Run RTE (Redis Test Environment) and block until Redis answers on :12000
docker compose -f tests/e2e/rte.docker-compose.yml build
docker compose -f tests/e2e/rte.docker-compose.yml up --force-recreate -d -V
./tests/e2e/wait-for-redis.sh localhost 12000

# Run tests under a virtual X server (headless CI) with the .ci.env config
RI_SOCKETS_CORS=true \
xvfb-run --auto-servernum \
yarn --cwd tests/e2e dotenv -e .ci.env yarn --cwd tests/e2e test:ci
33 changes: 33 additions & 0 deletions .github/workflows/aws.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
# Reusable workflow (workflow_call): uploads built release artifacts to the
# private S3 bucket, generating a .sha256 checksum for every tarball first.
name: AWS

on:
  workflow_call:

# AWS credentials/targets come from repository secrets.
env:
  AWS_BUCKET_NAME: ${{ secrets.AWS_BUCKET_NAME }}
  AWS_DISTRIBUTION_ID: ${{ secrets.AWS_DISTRIBUTION_ID }}
  AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
  AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
  AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}

jobs:
  release-private:
    name: Release s3 private
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      # Collect every artifact from the calling workflow into ./release
      - name: Download All Artifacts
        uses: actions/download-artifact@v4
        with:
          path: ./release

      - run: ls -R ./release

      # Checksum the tarballs, then upload everything under
      # s3://<bucket>/private/vscode/<version from package.json>
      - name: Publish private
        run: |
          chmod +x .github/build/sum_sha256.sh
          .github/build/sum_sha256.sh
          applicationVersion=$(jq -r '.version' package.json)
          aws s3 cp release/ s3://${AWS_BUCKET_NAME}/private/vscode/${applicationVersion} --recursive
Loading

0 comments on commit 8c63a82

Please sign in to comment.