Skip to content

Commit

Permalink
Merge pull request #234 from RedisInsight/release/1.2.0
Browse files Browse the repository at this point in the history
Release/1.2.0 to latest
  • Loading branch information
vlad-dargel authored Dec 20, 2024
2 parents 55b12b4 + a0231af commit 3b7a917
Show file tree
Hide file tree
Showing 163 changed files with 4,525 additions and 1,412 deletions.
599 changes: 7 additions & 592 deletions .circleci/config.yml

Large diffs are not rendered by default.

610 changes: 610 additions & 0 deletions .circleci/config.yml.backup

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions .env
Original file line number Diff line number Diff line change
Expand Up @@ -5,17 +5,17 @@
NODE_ENV='production'
RI_BASE_APP_URL='http://localhost'
RI_APP_PORT=5541
RI_APP_VERSION='1.0.0'
RI_APP_VERSION='1.2.0'
RI_APP_PREFIX='api'
RI_APP_FOLDER_NAME='.redis-for-vscode'
RI_CDN_PATH='https://s3.amazonaws.com/redisinsight.download/public/releases/2.54.1/web-mini'
RI_CDN_PATH='https://s3.amazonaws.com/redisinsight.download/public/releases/2.64.0/web-mini'
RI_WITHOUT_BACKEND=false
# RI_WITHOUT_BACKEND=true
RI_STDOUT_LOGGER=false
RI_AUTO_BOOTSTRAP=false
RI_MIGRATE_OLD_FOLDERS=false
RI_BUILD_TYPE='VS_CODE'
RI_ENCRYPTION_KEYTAR=false
RI_ANALYTICS_START_EVENTS=true
RI_AGREEMENTS_PATH='../../webviews/resources/agreements-spec.json'
RI_ENCRYPTION_KEYTAR_SERVICE="redis-for-vscode"
# RI_SEGMENT_WRITE_KEY='SEGMENT_WRITE_KEY'
2 changes: 2 additions & 0 deletions .eslintrc.js
Original file line number Diff line number Diff line change
Expand Up @@ -86,6 +86,8 @@ module.exports = {
'function-paren-newline': 'off',
'prefer-regex-literals': 'off',
'react/display-name': 'off',
'react/jsx-indent-props': [2, 2],
'react/jsx-indent': [2, 2],
'no-promise-executor-return': 'off',
'import/order': [
1,
Expand Down
14 changes: 14 additions & 0 deletions .github/actions/download-backend/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
# Composite action: downloads the prebuilt backend for the requested CPU
# architecture via the repo's `download:backend` yarn script.
name: Download backend

inputs:
  arch:
    description: Architecture arm64 or x64
    required: false
    default: 'x64'

runs:
  using: 'composite'
  steps:
    - name: Download backend
      shell: bash
      run: yarn download:backend ${{ inputs.arch }}
15 changes: 15 additions & 0 deletions .github/actions/install-all-build-libs/action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# Composite action: sets up Node 20 and installs all JS dependencies for the
# root package with a frozen lockfile (reproducible CI installs).
name: Install all libraries action
description: Install all libraries and dependencies

runs:
  using: 'composite'
  steps:
    # OS libraries
    - name: Setup Node
      uses: actions/setup-node@v4
      with:
        node-version: '20.18.0'

    # Fix: step name said "package.js" — it installs deps for root package.json.
    - name: Install dependencies for root package.json
      shell: bash
      run: yarn install --frozen-lockfile --network-timeout 1000000
4 changes: 4 additions & 0 deletions .github/build/sum_sha256.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
#!/bin/bash
set -e

# Write a <archive>.sha256 checksum file next to every .tar.gz under ./release.
# -execdir runs sha256sum from inside each archive's own directory, so the
# checksum file records the bare file name rather than a full path.
find ./release -type f -name '*.tar.gz' -execdir sh -c 'sha256sum "$1" > "$1.sha256"' _ {} \;
250 changes: 250 additions & 0 deletions .github/deps-licenses-report.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,250 @@
// Generates per-package license CSV reports (via license-checker), aggregates
// them into a summary CSV, and uploads every CSV to a Google Sheet.
const fs = require('fs');
const { join } = require('path');
const { last, set } = require('lodash');
const { google } = require('googleapis');
const { exec } = require('child_process');
const csvParser = require('csv-parser');
const { stringify } = require('csv-stringify');

// Output folder for all generated CSV reports.
const licenseFolderName = 'licenses';
// Target spreadsheet; must be accessible to the service account in gasKey.json.
const spreadsheetId = process.env.SPREADSHEET_ID;
const summaryFilePath = `./${licenseFolderName}/summary.csv`;
// Rows accumulated from every generated CSV (filled by processCSVFile).
const allData = [];
// Names of the CSV files to upload (filled by generateSummary).
let csvFiles = [];

// Main function
// Entry point: find every package.json, generate per-package license CSVs,
// build the summary CSV, then push everything to the Google Sheet.
// Exits with code 1 on any failure.
async function main() {
  const folderPath = './';
  const packageJsons = findPackageJsonFiles(folderPath); // Find all package.json files in the given folder

  console.log('All package.jsons was found:', packageJsons);

  // Create the folder if it doesn't exist
  if (!fs.existsSync(licenseFolderName)) {
    fs.mkdirSync(licenseFolderName);
  }

  try {
    // License checks for separate packages are independent — run in parallel.
    await Promise.all(packageJsons.map(runLicenseCheck));
    console.log('All csv files was generated');
    // generateSummary populates `csvFiles`, which sendLicensesToGoogleSheet
    // reads — these two calls must stay sequential.
    await generateSummary();
    await sendLicensesToGoogleSheet();
  } catch (error) {
    console.error('An error occurred:', error);
    process.exit(1);
  }
}

main();

// Function to find all package.json files in a given folder
// Walks `folderPath` depth-first and returns the "./"-prefixed folder path of
// every package.json found, skipping build/dependency directories.
function findPackageJsonFiles(folderPath) {
  const packageJsonName = 'package.json';
  const excludeFolders = ['dist', 'node_modules', 'test-extensions'];
  const found = [];

  // Depth-first walk; records the parent folder of each package.json.
  const walk = (dir) => {
    for (const entry of fs.readdirSync(dir)) {
      const entryPath = join(dir, entry);
      if (fs.statSync(entryPath).isDirectory()) {
        if (!excludeFolders.includes(entry)) {
          walk(entryPath);
        }
      } else if (entry === packageJsonName) {
        // Keep only the containing folder (strip "/package.json").
        found.push(`./${entryPath.slice(0, -packageJsonName.length - 1)}`);
      }
    }
  };

  walk(folderPath);
  return found;
}

// Function to run license check for a given package.json file
// Runs license-checker twice (production and development dependency trees)
// for the package located at `path`, writing CSV reports into the licenses
// folder. Rejects if either command fails.
async function runLicenseCheck(path) {
  // The last path segment names the report files; the repo root gets 'vscode'.
  const name = last(path.split('/')) || 'vscode';

  const COMMANDS = [
    `license-checker --start ${path} --csv --out ./${licenseFolderName}/${name}_prod.csv --production`,
    `license-checker --start ${path} --csv --out ./${licenseFolderName}/${name}_dev.csv --development`,
  ];

  return await Promise.all(COMMANDS.map((command) =>
    new Promise((resolve, reject) => {
      exec(command, (error, stdout, stderr) => {
        if (error) {
          console.error(`Failed command: ${command}, error:`, stderr);
          // Fix: return after rejecting so resolve() is not also invoked.
          reject(error);
          return;
        }
        resolve();
      });
    })
  ));
}

// Uploads every CSV listed in `csvFiles` into its own tab of the configured
// Google Sheet, authenticating with the service account key in ./gasKey.json.
// NOTE(review): the upload work happens inside stream 'end' callbacks that are
// never awaited, so this function can resolve before any upload completes, and
// per-sheet failures only surface via console.error — confirm this fire-and-
// forget behavior is intended.
async function sendLicensesToGoogleSheet() {
  try {
    const serviceAccountKey = JSON.parse(fs.readFileSync('./gasKey.json', 'utf-8'));

    // Set up JWT client with read/write access to Sheets
    const jwtClient = new google.auth.JWT(
      serviceAccountKey.client_email,
      null,
      serviceAccountKey.private_key,
      ['https://www.googleapis.com/auth/spreadsheets']
    );

    const sheets = google.sheets('v4');

    // Read all .csv files in the 'licenses' folder
    csvFiles.forEach((csvFile) => {
      // Extract sheet name from file name, e.g. "vscode_prod.csv" -> "vscode prod"
      const sheetName = csvFile.replace('.csv', '').replaceAll('_', ' ');

      const data = [];
      fs.createReadStream(`./${licenseFolderName}/${csvFile}`)
        .pipe(csvParser({ headers: false }))
        .on('data', (row) => {
          data.push(Object.values(row));
        })
        .on('end', async () => {
          const resource = { values: data };

          try {
            // Look up the spreadsheet to see whether this tab already exists.
            const response = await sheets.spreadsheets.get({
              auth: jwtClient,
              spreadsheetId,
            });

            const sheet = response.data.sheets.find((sheet) => sheet.properties.title === sheetName);
            if (sheet) {
              // Clear contents of the sheet starting from cell A2
              await sheets.spreadsheets.values.clear({
                auth: jwtClient,
                spreadsheetId,
                range: `${sheetName}!A1:Z`, // Assuming Z is the last column
              });
            } else {
              // Create the sheet if it doesn't exist
              await sheets.spreadsheets.batchUpdate({
                auth: jwtClient,
                spreadsheetId,
                resource: set({}, 'requests[0].addSheet.properties.title', sheetName),
              });
            }
          } catch (error) {
            console.error(`Error checking/creating sheet for ${sheetName}:`, error);
          }

          try {
            // Write the parsed CSV rows into the tab, starting at A1.
            await sheets.spreadsheets.values.batchUpdate({
              auth: jwtClient,
              spreadsheetId,
              resource: {
                valueInputOption: 'RAW',
                data: [
                  {
                    range: `${sheetName}!A1`, // Use the sheet name as the range and start from A2
                    majorDimension: 'ROWS',
                    values: data,
                  },
                ],
              },
            });

            console.log(`CSV data has been inserted into ${sheetName} sheet.`);
          } catch (err) {
            console.error(`Error inserting data for ${sheetName}:`, err);
          }
        });
    });
  } catch (error) {
    console.error('Error loading service account key:', error);
  }
}

// Function to read and process each CSV file
// Streams `licenses/<file>` through csv-parser, appending every parsed record
// to the module-level `allData` accumulator. Resolves when the stream ends,
// rejects on a parse error.
// NOTE(review): `columns`/`trim` look like csv-parse options rather than
// csv-parser ones — verify they actually take effect here.
const processCSVFile = (file) => {
  return new Promise((resolve, reject) => {
    const parser = csvParser({ columns: true, trim: true });
    const input = fs.createReadStream(`./${licenseFolderName}/${file}`);

    parser.on('data', (record) => {
      allData.push(record);
    });

    parser.on('end', () => {
      resolve();
    });

    parser.on('error', (err) => {
      reject(err);
    });

    input.pipe(parser);
  });
};

// Process and aggregate license data
// Tallies how many records in `allData` carry each license identifier and
// returns a { license: count } map.
const processLicenseData = () => {
  const counts = {};
  allData.forEach(({ license }) => {
    counts[license] = (counts[license] ?? 0) + 1;
  });
  return counts;
};

// Create summary CSV data
// Returns a header row followed by one [license, count] row per entry,
// ready for CSV serialization.
const createSummaryData = (licenseCountMap) => [
  ['License', 'Count'],
  ...Object.entries(licenseCountMap),
];

// Write summary CSV file
// Serializes `summaryData` to CSV, writes it to `summaryFilePath`, and queues
// the file for upload. Failures are logged, never thrown (best-effort).
const writeSummaryCSV = async (summaryData) => {
  try {
    const csvText = await stringifyPromise(summaryData);
    fs.writeFileSync(summaryFilePath, csvText);
    // Register the summary so sendLicensesToGoogleSheet uploads it as well.
    const fileName = last(summaryFilePath.split('/'));
    csvFiles.push(fileName);
    console.log(`Summary CSV saved as ${summaryFilePath}`);
  } catch (err) {
    console.error(`Error: ${err}`);
  }
};

// Stringify as a promise
// Promise wrapper around csv-stringify's callback API.
const stringifyPromise = (data) => new Promise((resolve, reject) => {
  stringify(data, (err, csvString) => {
    if (err) {
      reject(err);
    } else {
      resolve(csvString);
    }
  });
});

// Collects every generated CSV, parses them all into `allData`, and produces
// the aggregated summary.csv (which is also appended to `csvFiles`).
async function generateSummary() {
  // Sorted for a stable processing/upload order.
  csvFiles = fs
    .readdirSync(licenseFolderName)
    .filter((file) => file.endsWith('.csv'))
    .sort();

  // Sequential on purpose: processCSVFile appends into the shared allData.
  for (const file of csvFiles) {
    try {
      await processCSVFile(file);
    } catch (err) {
      console.error(`Error processing ${file}: ${err}`);
    }
  }

  await writeSummaryCSV(createSummaryData(processLicenseData()));
}
57 changes: 57 additions & 0 deletions .github/e2e/e2e-results.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
const fs = require('fs');

// Builds a Slack-formatted summary of the e2e mochawesome run and writes it
// to e2e.report.json for the Slack reporting step to post.

// Directory where mochawesome writes its JSON reports.
const reportDir = 'tests/e2e/mochawesome-report';

// When the run is split across parallel CI nodes, tag the message with which
// node this is (NODE_INDEX is zero-based).
let parallelNodeInfo = '';
const totalNodes = 4;
if (totalNodes > 1) {
  parallelNodeInfo = ` (node: ${parseInt(process.env.NODE_INDEX, 10) + 1}/${totalNodes})`;
}

const file = fs.readdirSync(reportDir).find((name) => name.endsWith('-setup-report.json'));
const appBuildType = process.env.APP_BUILD_TYPE || 'VSCode (Linux)';
const results = {
  message: {
    text: `*E2ETest - ${appBuildType}${parallelNodeInfo}* (Branch: *${process.env.GITHUB_REF_NAME}*)` +
      `\n<https://github.com/RedisInsight/RedisInsight/actions/runs/${process.env.GITHUB_RUN_ID}|View on Github Actions>`,
    attachments: [],
  },
};

// Bug fix: readdirSync returns bare file names, so the report must be read
// relative to the report directory, not the process cwd.
const result = JSON.parse(fs.readFileSync(`${reportDir}/${file}`, 'utf-8'));
const testRunResult = {
  color: '#36a64f', // green until a failure is seen
  // Bug fix: the closing '*' was missing, leaving unbalanced Slack bold markup.
  title: `Started at: *${result.stats.start}*`,
  text: `Executed ${result.stats.tests} in ${(new Date(result.stats.end) - new Date(result.stats.start)) / 1000}s`,
  fields: [
    {
      title: 'Passed',
      value: result.stats.passes,
      short: true,
    },
    {
      title: 'Skipped',
      value: result.stats.skipped,
      short: true,
    },
  ],
};

const failed = result.stats.failures;
if (failed) {
  results.passed = false;
  testRunResult.color = '#cc0000';
  testRunResult.fields.push({
    title: 'Failed',
    value: failed,
    short: true,
  });
}

results.message.attachments.push(testRunResult);

// Ping the channel only when something failed.
if (results.passed === false) {
  results.message.text = '<!here> ' + results.message.text;
}

fs.writeFileSync('e2e.report.json', JSON.stringify({
  channel: process.env.SLACK_TEST_REPORT_CHANNEL,
  ...results.message,
}));
15 changes: 15 additions & 0 deletions .github/e2e/test.app.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
#!/bin/bash
set -e

# Create folder before tests run to prevent permissions issues
mkdir -p tests/e2e/remote

# Run RTE (Redis Test Environment)
# Build and (re)start the dockerised Redis test environment, then block until
# Redis answers on localhost:12000 before starting the test suite.
docker compose -f tests/e2e/rte.docker-compose.yml build
docker compose -f tests/e2e/rte.docker-compose.yml up --force-recreate -d -V
./tests/e2e/wait-for-redis.sh localhost 12000

# Run tests
# xvfb-run provides a virtual X display for the UI tests.
# NOTE(review): RI_SOCKETS_CORS=true presumably relaxes CORS for the test
# socket connection — confirm against the backend's socket config.
RI_SOCKETS_CORS=true \
xvfb-run --auto-servernum \
yarn --cwd tests/e2e dotenv -e .ci.env yarn --cwd tests/e2e test:ci
Loading

0 comments on commit 3b7a917

Please sign in to comment.