Skip to content

Commit

Permalink
Merge pull request #147 from forcedotcom/release-1.3.0
Browse files Browse the repository at this point in the history
RELEASE @W-16879137@: Conducting v1.3.0 release
  • Loading branch information
stephen-carter-at-sf authored Oct 29, 2024
2 parents 84dcae5 + 460d63e commit 38d6425
Show file tree
Hide file tree
Showing 17 changed files with 900 additions and 292 deletions.
40 changes: 40 additions & 0 deletions .github/workflows/daily-smoke-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -28,4 +28,44 @@ jobs:
create-vsix-artifact:
name: 'Upload VSIX as artifact'
uses: ./.github/workflows/create-vsix-artifact.yml
# Step 4: Report any problems
report-problems:
name: 'Report problems'
runs-on: ubuntu-latest
needs: [build-scanner-tarball, smoke-test, create-vsix-artifact]
if: ${{ failure() || cancelled() }}
steps:
- name: Report problems
shell: bash
env:
IS_CRITICAL: ${{ contains(join(steps.*.outcome), 'failure') || contains(join(steps.*.outcome), 'skipped') }}
RUN_LINK: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}
run: |
if [[ ${{ env.IS_CRITICAL }} == true ]]; then
ALERT_SEV="critical"
ALERT_SUMMARY="Daily smoke test failed on ${{ runner.os }}"
else
ALERT_SEV="info"
ALERT_SUMMARY="Daily smoke test succeeded with retries on ${{ runner.os }}"
fi
generate_post_data() {
cat <<EOF
{
"payload": {
"summary": "${ALERT_SUMMARY}",
"source": "Github Actions",
"severity": "${ALERT_SEV}"
},
"links": [{
"href": "${{ env.RUN_LINK }}",
"text": "Link to action execution"
}],
"event_action": "trigger",
"dedup_key": "GH-HB-${{ matrix.os.vm }}-${{ matrix.node }}",
"routing_key": "${{ secrets.PAGERDUTY_HEARTBEAT_KEY }}"
}
EOF
}
curl --request POST --data "$(generate_post_data)" https://events.pagerduty.com/v2/enqueue
159 changes: 159 additions & 0 deletions .github/workflows/production-heartbeat.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,159 @@
name: production-heartbeat
on:
  workflow_dispatch: # As per documentation, the colon is necessary even though no config is required.
  schedule:
    # Cron syntax is "minute[0-59] hour[0-23] date[1-31] month[1-12] day[0-6]". '*' is 'any value', and multiple
    # values can be specified with comma-separated lists. All times are UTC.
    # So this expression means "run at 45 minutes past 13:00, 17:00, and 21:00 UTC". The hours were chosen so
    # that the jobs run only close to business hours of Central Time.
    # Days were chosen to run only from Monday through Friday.
    - cron: '45 13,17,21 * * 1,2,3,4,5'
jobs:
  production-heartbeat:
    strategy:
      fail-fast: false
      matrix:
        os: [{vm: ubuntu-latest}, {vm: windows-2019}, {vm: macos-latest}]
        node: ['lts/*']
    runs-on: ${{ matrix.os.vm }}
    timeout-minutes: 60
    steps:
      # 1 Install VS Code and Extension on Ubuntu
      - name: Install VS Code on Ubuntu
        if: runner.os == 'Linux'
        run: |
          sudo apt update
          sudo apt install wget gpg -y
          wget -qO- https://packages.microsoft.com/keys/microsoft.asc | gpg --dearmor > packages.microsoft.gpg
          sudo install -o root -g root -m 644 packages.microsoft.gpg /usr/share/keyrings/
          sudo sh -c 'echo "deb [arch=amd64] https://packages.microsoft.com/repos/vscode stable main" > /etc/apt/sources.list.d/vscode.list'
          sudo apt update
          sudo apt install code -y
      - name: Install Salesforce Code Analyzer Extension on Ubuntu
        if: runner.os == 'Linux'
        run: |
          code --install-extension salesforce.sfdx-code-analyzer-vscode
      - name: Verify Extension Installation on Ubuntu
        if: runner.os == 'Linux'
        run: |
          if code --list-extensions | grep -q 'salesforce.sfdx-code-analyzer-vscode'; then
            echo "Extension installed successfully"
          else
            # "::error::message" (with the double-colon terminator) is the workflow-command syntax GitHub
            # recognizes; "::error message" is just echoed literally and produces no annotation.
            echo "::error::Extension installation failed" && exit 1
          fi
      # 2 Install VS Code and Extension on Windows
      # We use chocolatey to install vscode since it gives a reliable path for the location of code.exe
      # We have also seen Windows to be flaky, so adding additional echo statements.
      - name: Install VS Code on Windows
        if: runner.os == 'Windows'
        run: |
          Write-Host "Installing Chocolatey..."
          Set-ExecutionPolicy Bypass -Scope Process -Force;
          [System.Net.ServicePointManager]::SecurityProtocol = [System.Net.ServicePointManager]::SecurityProtocol -bor 3072;
          iex ((New-Object System.Net.WebClient).DownloadString('https://chocolatey.org/install.ps1'))
          Write-Host "Installing Visual Studio Code using Chocolatey..."
          choco install vscode -y
      - name: Install Salesforce Code Analyzer Extension on Windows
        if: runner.os == 'Windows'
        run: |
          echo "Installing Code Analyzer Extension..."
          "/c/Program Files/Microsoft VS Code/bin/code" --install-extension salesforce.sfdx-code-analyzer-vscode
          echo "Installing Code Analyzer Complete"
          echo "Waiting for 10 seconds..."
          sleep 10
          echo "Listing installed extensions..."
          "/c/Program Files/Microsoft VS Code/bin/code" --list-extensions
        shell: bash

      - name: Verify Extension on Windows
        if: runner.os == 'Windows'
        run: |
          echo "Waiting for 10 seconds..."
          sleep 10
          echo "Listing installed extensions..."
          extensions=$("/c/Program Files/Microsoft VS Code/bin/code" --list-extensions)
          echo "$extensions"
          if echo "$extensions" | grep -q 'salesforce.sfdx-code-analyzer-vscode'; then
            echo "Extension 'salesforce.sfdx-code-analyzer-vscode' is installed successfully"
          else
            echo "::error::Extension 'salesforce.sfdx-code-analyzer-vscode' is NOT installed"
            exit 1
          fi
        shell: bash

      # 3 Install VS Code and Extension on macOS
      - name: Install VS Code on macOS
        if: runner.os == 'macOS'
        run: |
          brew install --cask visual-studio-code
      - name: Install Salesforce Code Analyzer Extension on macOS
        if: runner.os == 'macOS'
        run: |
          code --install-extension salesforce.sfdx-code-analyzer-vscode
      - name: Verify Extension Installation on macOS
        if: runner.os == 'macOS'
        run: |
          if code --list-extensions | grep -q 'salesforce.sfdx-code-analyzer-vscode'; then
            echo "Extension installed successfully"
          else
            echo "::error::Extension installation failed" && exit 1
          fi
      # === Report any problems ===
      - name: Report problems
        # There are problems if any step failed or was skipped.
        # Note that the `join()` call omits null values, so if any steps were skipped, they won't have a corresponding
        # value in the string.
        if: ${{ failure() || cancelled() }}
        shell: bash
        env:
          # If we're here because steps failed or were skipped, then that's a critical problem. Otherwise it's a normal one.
          # We can't use the `failure()` or `cancelled()` convenience methods outside of the `if` condition, hence the
          # `contains()` calls.
          IS_CRITICAL: ${{ contains(join(steps.*.outcome), 'failure') || contains(join(steps.*.outcome), 'skipped') }}
          # A link to this run, so the PagerDuty assignee can quickly get here.
          RUN_LINK: https://github.com/${{ github.repository }}/actions/runs/${{ github.run_id }}

        run: |
          # Quote both sides so the comparison stays well-formed even if the expression expands unexpectedly.
          if [[ "${{ env.IS_CRITICAL }}" == "true" ]]; then
            ALERT_SEV="critical"
            ALERT_SUMMARY="Production heartbeat script failed on ${{ runner.os }}"
          else
            # Leaving the else part here to help with running end-to-end sanity test with real alerts being created.
            ALERT_SEV="info"
            ALERT_SUMMARY="Production heartbeat script succeeded with retries on ${{ runner.os }}"
          fi
          # Define a helper function to create our POST request's data, to sidestep issues with nested quotations.
          generate_post_data() {
            # This is known as a HereDoc, and it lets us declare multi-line input ending when the specified limit string,
            # in this case EOF, is encountered.
            cat <<EOF
          {"payload": {
            "summary": "${ALERT_SUMMARY}",
            "source": "Github Actions",
            "severity": "${ALERT_SEV}"
          },
          "links": [{
            "href": "${{ env.RUN_LINK }}",
            "text": "Link to action execution"
          }],
          "event_action": "trigger",
          "dedup_key": "GH-HB-${{ matrix.os.vm }}-${{ matrix.node }}",
          "routing_key": "${{ secrets.PAGERDUTY_HEARTBEAT_KEY }}"
          }
          EOF
          }
          # Make our POST request
          curl --request POST --data "$(generate_post_data)" https://events.pagerduty.com/v2/enqueue
2 changes: 1 addition & 1 deletion SHA256.md
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@ make sure that their SHA values match the values in the list below.
shasum -a 256 <location_of_the_downloaded_file>

3. Confirm that the SHA in your output matches the value in this list of SHAs.
f668893331860e3b8bc89357c4bfe2cac9840ee05acd1b0d67de5a8c37518b87 ./extensions/sfdx-code-analyzer-vscode-1.1.0.vsix
146d022eebef24a355b117ad38713ac53a006f8e74cae178c6364a302878d3bc ./extensions/sfdx-code-analyzer-vscode-1.2.0.vsix
4. Change the filename extension for the file that you downloaded from .zip to
.vsix.

Expand Down
12 changes: 9 additions & 3 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
"color": "#ECECEC",
"theme": "light"
},
"version": "1.2.0",
"version": "1.3.0",
"publisher": "salesforce",
"license": "BSD-3-Clause",
"engines": {
Expand Down Expand Up @@ -57,6 +57,7 @@
"mocha": "^10.1.0",
"nyc": "^15.1.0",
"ovsx": "^0.8.3",
"proxyquire": "^2.1.3",
"sinon": "^15.1.0",
"ts-node": "^10.9.1",
"typescript": "^4.9.3"
Expand Down Expand Up @@ -101,7 +102,7 @@
},
{
"command": "sfca.runDfa",
"title": "***SFDX: Run Graph-Engine Based Analysis***"
"title": "SFDX: Scan Project with Graph Engine Path-Based Analysis"
},
{
"command": "sfca.runApexGuruAnalysisOnSelectedFile",
Expand Down Expand Up @@ -167,6 +168,11 @@
"type": "boolean",
"default": false,
"description": "Discover critical problems and performance issues in your Apex code with ApexGuru, which analyzes your Apex files for you. This feature is in a closed pilot; contact your account representative to learn more."
},
"codeAnalyzer.partialGraphEngineScans.enabled": {
"type": "boolean",
"default": false,
"description": "Enables partial Salesforce Graph Engine scans on only the code you've modified since the initial full scan. (Beta)"
}
}
},
Expand All @@ -186,7 +192,7 @@
},
{
"command": "sfca.runDfa",
"when": "false"
"when": "sfca.partialRunsEnabled"
},
{
"command": "sfca.removeDiagnosticsOnActiveFile",
Expand Down
11 changes: 9 additions & 2 deletions src/apexguru/apex-guru-service.ts
Original file line number Diff line number Diff line change
Expand Up @@ -57,7 +57,9 @@ export async function runApexGuruOnFile(selection: vscode.Uri, runInfo: RunInfo)
new DiagnosticManager().displayDiagnostics([selection.fsPath], [ruleResult], diagnosticCollection);
TelemetryService.sendCommandEvent(Constants.TELEM_SUCCESSFUL_APEX_GURU_FILE_ANALYSIS, {
executedCommand: commandName,
duration: (Date.now() - startTime).toString()
duration: (Date.now() - startTime).toString(),
violationsCount: ruleResult.violations.length.toString(),
violationsWithSuggestedCodeCount: getViolationsWithSuggestions(ruleResult).toString()
});
void vscode.window.showInformationMessage(messages.apexGuru.finishedScan(ruleResult.violations.length));
});
Expand All @@ -68,6 +70,11 @@ export async function runApexGuruOnFile(selection: vscode.Uri, runInfo: RunInfo)
}
}

/**
 * Counts the violations in a rule result that carry a usable ApexGuru code suggestion.
 *
 * @param ruleResult The ApexGuru scan result whose violations should be inspected.
 * @returns The number of violations whose `suggestedCode` contains non-whitespace text.
 */
export function getViolationsWithSuggestions(ruleResult: RuleResult): number {
    // A violation only counts when suggestedCode is present AND non-blank. The previous
    // check (`suggestedCode?.trim() !== ''`) evaluated to `undefined !== ''` === true for
    // violations with no suggestedCode at all, inflating the count.
    return ruleResult.violations.filter(violation => {
        const suggestedCode = (violation as ApexGuruViolation).suggestedCode;
        return suggestedCode != null && suggestedCode.trim() !== '';
    }).length;
}

export async function pollAndGetApexGuruResponse(connection: Connection, requestId: string, maxWaitTimeInSeconds: number, retryIntervalInMillis: number): Promise<ApexGuruQueryResponse> {
let queryResponse: ApexGuruQueryResponse;
let lastErrorMessage = '';
Expand Down Expand Up @@ -147,7 +154,7 @@ export function transformStringToRuleResult(fileName: string, jsonString: string
column: 1,
currentCode: Buffer.from(encodedCodeBefore, 'base64').toString('utf8'),
suggestedCode: Buffer.from(encodedCodeAfter, 'base64').toString('utf8'),
url: fileName
url: 'https://help.salesforce.com/s/articleView?id=sf.apexguru_antipatterns.htm&type=5'
};

ruleResult.violations.push(violation);
Expand Down
35 changes: 35 additions & 0 deletions src/deltarun/delta-run-service.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
/*
* Copyright (c) 2024, Salesforce, Inc.
* All rights reserved.
* SPDX-License-Identifier: BSD-3-Clause
* For full license text, see the LICENSE file in the repo root or https://opensource.org/licenses/BSD-3-Clause
*/
import * as fs from 'fs';

/**
 * Reads the Graph Engine cache file and returns the cached entries belonging to
 * every file present in the saved-files cache.
 *
 * @param sfgecachepath Path to the JSON cache file written by the Graph Engine.
 * @param savedFilesCache Set of filenames that were saved (i.e. modified) since the last full scan.
 * @returns The flattened list of cache entries for all matching files, in file order.
 */
export function getDeltaRunTarget(sfgecachepath:string, savedFilesCache:Set<string>): string[] {
    // Load and parse the cache document from disk.
    const cacheDocument = JSON.parse(fs.readFileSync(sfgecachepath, 'utf-8')) as CacheData;

    // Keep only the records for files that were saved, then merge their entry
    // lists into a single flat array.
    return cacheDocument.data
        .filter((record: CacheEntry) => savedFilesCache.has(record.filename))
        .flatMap((record: CacheEntry) => record.entries);
}

// One cache record: a source file plus the entries derived from it.
interface CacheEntry {
    filename: string;
    entries: string[];
}

// Shape of the JSON document stored at sfgecachepath.
interface CacheData {
    data: CacheEntry[];
}
Loading

0 comments on commit 38d6425

Please sign in to comment.