', 200, 'text/html', false);
- return;
- }
-
- let profileFile = Profiler.getDumpFile(queryObject.p);
- let traceBuilder = new TraceBuilder(profileFile);
-
- traceBuilder.compile(function(error, file) {
- if (error) {
- this._log(`Unable to read profile ${profileFile}: ${error}`);
- this._send500(response, error);
- return;
- }
-
- this._log(`Serving profile ${profileFile}`);
- this._send(response, file, 200, 'text/html', true);
- }.bind(this));
-
- return;
- }
-
- this._send500(response, 'You have to specify profile id');
- return;
} else if (uri === Instance.LAMBDA_URI || uri === Instance.LAMBDA_ASYNC_URI) {
let isAsync = uri === Instance.LAMBDA_ASYNC_URI;
@@ -533,17 +485,6 @@ export class Instance {
}.bind(this));
}
- /**
- * @param {Http.IncomingMessage} request
- * @returns {Boolean}
- * @private
- */
- _isTracerCompatible(request) {
- let ua = request.headers['user-agent'] || '';
-
- return /chrom(e|ium)/i.test(ua);
- }
-
/**
* @param {Http.IncomingMessage} request
* @param {Function} callback
@@ -637,13 +578,6 @@ export class Instance {
this._logger(...args);
}
- /**
- * @returns {String}
- */
- static get PROFILE_URI() {
- return '/_/profile';
- }
-
/**
* @returns {String}
*/
diff --git a/src/lib/Server/TraceBuilder.js b/src/lib/Server/TraceBuilder.js
deleted file mode 100644
index 78332397..00000000
--- a/src/lib/Server/TraceBuilder.js
+++ /dev/null
@@ -1,92 +0,0 @@
-/**
- * Created by AlexanderC on 8/17/15.
- */
-
-'use strict';
-
-import Path from 'path';
-import {exec as Exec} from 'child_process';
-import FileSystem from 'fs';
-
-export class TraceBuilder {
- /**
- * @param {String} traceFile
- */
- constructor(traceFile) {
- this._traceFile = traceFile;
- }
-
- /**
- * @returns {String}
- */
- get traceFile() {
- return this._traceFile;
- }
-
- /**
- * @param {Function} callback
- * @param {Boolean} cache
- * @returns {TraceBuilder}
- */
- compile(callback, cache = true) {
- let cacheFile = `${this._traceFile}${TraceBuilder.CACHE_EXTENSION}`;
-
- if (!cache) {
- this._compile(cacheFile, callback);
- return this;
- }
-
- FileSystem.exists(cacheFile, function(exists) {
- if (exists) {
- this._readFile(cacheFile, callback);
- return;
- }
-
- this._compile(cacheFile, callback);
- }.bind(this));
-
- return this;
- }
-
- /**
- * @param {String} filePath
- * @param {Function} callback
- * @private
- */
- _readFile(filePath, callback) {
- FileSystem.readFile(filePath, 'binary', callback);
- }
-
- /**
- * @param {String} outputFile
- * @param {Function} callback
- * @private
- */
- _compile(outputFile, callback) {
- Exec(
- `${TraceBuilder.COMPILER} ${this._traceFile} --config=full --output=${outputFile}`,
- function(error, stdout, stderr) {
- if (error) {
- callback(`Error while compiling profile: ${stderr}`, null);
- return;
- }
-
- this._readFile(outputFile, callback);
- }.bind(this)
- );
- }
-
- /**
- * @returns {String}
- */
- static get CACHE_EXTENSION() {
- return '.html.cache';
- }
-
- /**
- * @returns {String}
- */
- static get COMPILER() {
- return Path.join(__dirname, '../../tools/google_trace_viewer/tracing/trace2html');
- }
-}
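For reference, the deleted TraceBuilder wrapped the bundled trace2html tool in a compile-once, serve-from-cache flow: look for a `.html.cache` sibling, reuse it if readable, otherwise shell out to the compiler and read back its output. A minimal sketch of the same pattern in current Node style (the compiler path and flags are illustrative, not part of the codebase); using `execFile` rather than string-interpolated `exec` also sidesteps shell quoting of the trace path:

```js
'use strict';

const fs = require('fs');
const { execFile } = require('child_process');

// Compile traceFile to HTML once; on later calls serve the cached output.
// compilerPath stands in for a trace2html-style converter.
function compileWithCache(compilerPath, traceFile, callback) {
  const cacheFile = `${traceFile}.html.cache`;

  fs.access(cacheFile, fs.constants.R_OK, (accessError) => {
    if (!accessError) {
      fs.readFile(cacheFile, callback); // cache hit
      return;
    }

    const args = [traceFile, '--config=full', `--output=${cacheFile}`];

    execFile(compilerPath, args, (error, stdout, stderr) => {
      if (error) {
        callback(new Error(`Error while compiling profile: ${stderr}`));
        return;
      }

      fs.readFile(cacheFile, callback);
    });
  });
}
```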
diff --git a/src/lib/Terminal/Help.js b/src/lib/Terminal/Help.js
index cdb7ae8c..e35491c5 100644
--- a/src/lib/Terminal/Help.js
+++ b/src/lib/Terminal/Help.js
@@ -113,12 +113,9 @@ export class Help {
*/
static _scoreSimilarWord(string, word, fuzziness = null) {
// If the string is equal to the word, perfect match.
- if (string === word) {
+ if (string === word || string.indexOf(word) === 0) {
return 1;
- }
-
- //if it's not a perfect match and is empty return 0
- if (!word) {
+ } else if (!word) {
return 0;
}
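The Help.js change above promotes a prefix match to a perfect score. Condensed outside the class, the new head of `_scoreSimilarWord` behaves like this (the trailing fuzzy branch is elided):

```js
// Exact matches and prefix matches both score 1.
// Caveat: string.indexOf('') === 0 for any string, so an empty word now
// takes the first branch and scores 1, not 0 as before the change.
function scoreHead(string, word) {
  if (string === word || string.indexOf(word) === 0) {
    return 1;
  } else if (!word) {
    return 0;
  }

  return -1; // placeholder: fuzzy scoring continues in the real method
}

scoreHead('deploy', 'dep'); // 1 — a prefix now counts as a perfect match
```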
diff --git a/src/lib/Terminal/Program.js b/src/lib/Terminal/Program.js
index 795fff77..41092e59 100644
--- a/src/lib/Terminal/Program.js
+++ b/src/lib/Terminal/Program.js
@@ -160,6 +160,8 @@ export class Program {
* @param {Array} args
*/
run(args = null) {
+ Program._logDriver.overrideJsConsole(false);
+
if (args || !this._inputParsed) {
this.input(args);
}
@@ -171,6 +173,8 @@ export class Program {
// @todo: add it for commands as well
if (showAutoCompletion && showAutoCompletion.exists) {
+ Program._logDriver.overrideJsConsole(false, false);
+
this.help.printAutoCompletion(
(this.hasCommands && command) ? command.value : ''
);
@@ -183,7 +187,7 @@ export class Program {
if (!subProgram) {
console.log('');
- console.log(`No such command '${command.value}' found!`);
+ console.error(`No such command '${command.value}' found!`);
this._outputListCommands();
this.exit(1);
@@ -211,13 +215,12 @@ export class Program {
this._validateInput();
try {
- // @todo: find a better place for this
- new DeepLog().overrideJsConsole();
+ Program._logDriver.overrideJsConsole();
this._action.bind(this)(...this._args.listValues());
} catch (e) {
- console.log(e.message);
- console.log(e.stack);
+ console.error(e.message);
+ console.error(e.stack);
this.exit(1);
}
@@ -435,4 +438,16 @@ export class Program {
static get NODE_BINARY() {
return 'node';
}
+
+ /**
+ * @returns {DeepLog}
+ * @private
+ */
+ static get _logDriver() {
+ if (!Program.hasOwnProperty('__deep_log')) {
+ Program.__deep_log = new DeepLog();
+ }
+
+ return Program.__deep_log;
+ }
}
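The new static `_logDriver` getter memoizes one DeepLog instance on the class. A small standalone sketch of the same lazily initialized static (`Logger` is a hypothetical stand-in for DeepLog):

```js
'use strict';

class Logger {
  overrideJsConsole() { /* swap console methods; elided */ }
}

class Program {
  /**
   * @returns {Logger}
   */
  static get _logDriver() {
    // hasOwnProperty guards against inheriting the field from a parent
    // class; a plain truthiness check would share one instance down the
    // prototype chain.
    if (!Program.hasOwnProperty('__logger')) {
      Program.__logger = new Logger();
    }

    return Program.__logger;
  }
}

console.log(Program._logDriver === Program._logDriver); // true — one instance
```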
diff --git a/src/package.json b/src/package.json
index a323ed04..ea9aa9c1 100644
--- a/src/package.json
+++ b/src/package.json
@@ -1,6 +1,6 @@
{
"name": "deepify",
- "version": "1.4.0",
+ "version": "1.5.0",
"description": "DEEP Development Tools",
"keywords": [
"Digital Enterprise End-To-End Platform",
@@ -47,9 +47,8 @@
"url": "https://github.com/MitocGroup/deepify.git"
},
"scripts": {
- "coverage": "babel-node `which istanbul` cover --report lcovonly _mocha -- --ui tdd --recursive --reporter spec",
"preinstall": "npm run compile",
- "postinstall": "hooks/assure_deep_dev.js && hooks/autocomplete_deepify.sh",
+ "postinstall": "hooks/autocomplete_deepify.sh",
"test": "babel-node `which isparta` cover --include 'lib/**/*.js' _mocha -- 'test/**/*.js' --reporter spec --ui tdd",
"compile": "if [ -d 'lib/' ]; then BABEL_ENV=production babel lib/ --out-dir lib.compiled/; fi",
"obfuscate-compiled": "if [ -d 'lib/' ]; then npm list -g --depth 0 uglify > /dev/null 2>&1 || npm install uglify -g; for f in $(find lib.compiled -type f -name *.js); do uglify -s ${f} -o ${f}; done; fi",
@@ -61,17 +60,14 @@
"deep-db": "^1.x.x",
"deep-core": "^1.x.x",
"proxyquire": "^1.5.x",
- "v8-profiler": "^5.3.x",
- "traceviewify": "^0.4.x",
"open": "^0.0.x",
- "aws-sdk": "^2.2.x",
"jsonfile": "^1.1.x",
"fs-extra": "^0.23.x",
- "sync-exec": "^0.6.x",
"tmp": "^0.0.x",
"aws-api-gw-client": "^0.1.x",
"mime": "^1.3.x",
- "gather-dependencies": "^1.0.x"
+ "gather-dependencies": "^1.0.x",
+ "aws-sdk": "^2.2.x"
},
"devDependencies": {
"chai": "^3.2.x",
@@ -79,8 +75,8 @@
"sinon-chai": "^2.8.x"
},
"engines": {
- "node": ">=0.12 <5.0.0",
- "npm": ">=2.10"
+ "node": ">=0.12 <5.0",
+ "npm": ">=2.10 <3.0"
},
"os": [
"!win32"
diff --git a/src/test/Lambda/Profile/AbstractProfiler.js b/src/test/Lambda/Profile/AbstractProfiler.js
deleted file mode 100644
index 2eb7f4f1..00000000
--- a/src/test/Lambda/Profile/AbstractProfiler.js
+++ /dev/null
@@ -1,13 +0,0 @@
-// THIS TEST WAS GENERATED AUTOMATICALLY ON Mon Sep 14 2015 16:15:41 GMT+0300 (EEST)
-
-'use strict';
-
-import chai from 'chai';
-import {AbstractProfiler} from '../../../lib/Lambda/Profile/AbstractProfiler';
-
-// @todo: Add more advanced tests
-suite('Lambda/Profile/AbstractProfiler', function() {
- test('Class AbstractProfiler exists in Lambda/Profile/AbstractProfiler', function() {
- chai.expect(typeof AbstractProfiler).to.equal('function');
- });
-});
diff --git a/src/test/Lambda/Profile/Profiler.js b/src/test/Lambda/Profile/Profiler.js
deleted file mode 100644
index 28d10d6b..00000000
--- a/src/test/Lambda/Profile/Profiler.js
+++ /dev/null
@@ -1,13 +0,0 @@
-// THIS TEST WAS GENERATED AUTOMATICALLY ON Mon Sep 14 2015 16:15:41 GMT+0300 (EEST)
-
-'use strict';
-
-import chai from 'chai';
-import {Profiler} from '../../../lib/Lambda/Profile/Profiler';
-
-// @todo: Add more advanced tests
-suite('Lambda/Profile/Profiler', function() {
- test('Class Profiler exists in Lambda/Profile/Profiler', function() {
- chai.expect(typeof Profiler).to.equal('function');
- });
-});
diff --git a/src/test/Lambda/Profile/StaticDumpFileProfiler.js b/src/test/Lambda/Profile/StaticDumpFileProfiler.js
deleted file mode 100644
index f4486a53..00000000
--- a/src/test/Lambda/Profile/StaticDumpFileProfiler.js
+++ /dev/null
@@ -1,13 +0,0 @@
-// THIS TEST WAS GENERATED AUTOMATICALLY ON Mon Sep 14 2015 16:15:41 GMT+0300 (EEST)
-
-'use strict';
-
-import chai from 'chai';
-import {StaticDumpFileProfiler} from '../../../lib/Lambda/Profile/StaticDumpFileProfiler';
-
-// @todo: Add more advanced tests
-suite('Lambda/Profile/StaticDumpFileProfiler', function() {
- test('Class StaticDumpFileProfiler exists in Lambda/Profile/StaticDumpFileProfiler', function() {
- chai.expect(typeof StaticDumpFileProfiler).to.equal('function');
- });
-});
diff --git a/src/test/Server/TraceBuilder.js b/src/test/Server/TraceBuilder.js
deleted file mode 100644
index 6625c2f3..00000000
--- a/src/test/Server/TraceBuilder.js
+++ /dev/null
@@ -1,13 +0,0 @@
-// THIS TEST WAS GENERATED AUTOMATICALLY ON Mon Sep 14 2015 16:15:41 GMT+0300 (EEST)
-
-'use strict';
-
-import chai from 'chai';
-import {TraceBuilder} from '../../lib/Server/TraceBuilder';
-
-// @todo: Add more advanced tests
-suite('Server/TraceBuilder', function() {
- test('Class TraceBuilder exists in Server/TraceBuilder', function() {
- chai.expect(typeof TraceBuilder).to.equal('function');
- });
-});
diff --git a/src/tools/google_trace_viewer/build/__init__.py b/src/tools/google_trace_viewer/build/__init__.py
deleted file mode 100755
index 6464f9db..00000000
--- a/src/tools/google_trace_viewer/build/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# Copyright (c) 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import os
\ No newline at end of file
diff --git a/src/tools/google_trace_viewer/build/checklicenses.py b/src/tools/google_trace_viewer/build/checklicenses.py
deleted file mode 100755
index 8fbb9e4a..00000000
--- a/src/tools/google_trace_viewer/build/checklicenses.py
+++ /dev/null
@@ -1,160 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-'''Makes sure that all files contain proper licensing information.'''
-
-
-import json
-import optparse
-import os.path
-import subprocess
-import sys
-
-import logging
-
-def PrintUsage():
- print '''Usage: python checklicenses.py [--root <root>] [tocheck]
- --root Specifies the repository root. This defaults to '../..' relative
- to the script file. This will be correct given the normal location
- of the script in '/tools/checklicenses'.
-
- tocheck Specifies the directory, relative to root, to check. This defaults
- to '.' so it checks everything.
-
-Examples:
- python checklicenses.py
- python checklicenses.py --root ~/chromium/src third_party'''
-
-
-WHITELISTED_LICENSES = [
- 'Apache (v2.0)',
- 'BSD (3 clause)',
- 'BSD-like',
- 'MIT/X11 (BSD like)',
- 'zlib/libpng',
-]
-
-
-PATH_SPECIFIC_WHITELISTED_LICENSES = {
- 'tracing/third_party/devscripts': [
- 'GPL (v2 or later)',
- ],
-}
-
-
-def check_licenses(base_directory, target_directory=None):
- # Figure out which directory we have to check.
- if not target_directory:
- # No directory to check specified, use the repository root.
- start_dir = base_directory
- else:
- # Directory specified. Start here. It's supposed to be relative to the
- # base directory.
- start_dir = os.path.abspath(os.path.join(base_directory, target_directory))
-
- logging.info('Using base directory: %s' % base_directory)
- logging.info('Checking: %s' % start_dir)
- logging.info('')
-
- licensecheck_path = os.path.abspath(os.path.join(base_directory,
- 'tracing',
- 'third_party',
- 'devscripts',
- 'licensecheck.pl'))
-
- licensecheck = subprocess.Popen([licensecheck_path,
- '-l', '100',
- '-r', start_dir],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- stdout, stderr = licensecheck.communicate()
- logging.info('----------- licensecheck stdout -----------')
- logging.info(stdout)
- logging.info('--------- end licensecheck stdout ---------')
- if licensecheck.returncode != 0 or stderr:
- print '----------- licensecheck stderr -----------'
- print stderr
- print '--------- end licensecheck stderr ---------'
- return 1
-
- used_suppressions = set()
- errors = []
-
- for line in stdout.splitlines():
- filename, license = line.split(':', 1)
- filename = os.path.relpath(filename.strip(), base_directory)
-
- # All files in the build output directory are generated one way or another.
- # There's no need to check them.
- if filename.startswith('out/'):
- continue
-
- # For now we're just interested in the license.
- license = license.replace('*No copyright*', '').strip()
-
- # Skip generated files.
- if 'GENERATED FILE' in license:
- continue
-
- if license in WHITELISTED_LICENSES:
- continue
-
- matched_prefixes = [
- prefix for prefix in PATH_SPECIFIC_WHITELISTED_LICENSES
- if filename.startswith(prefix) and
- license in PATH_SPECIFIC_WHITELISTED_LICENSES[prefix]]
- if matched_prefixes:
- used_suppressions.update(set(matched_prefixes))
- continue
-
- errors.append({'filename': filename, 'license': license})
-
- if errors:
- for error in errors:
- print "'%s' has non-whitelisted license '%s'" % (
- error['filename'], error['license'])
- print '\nFAILED\n'
- print 'Please read',
- print 'http://www.chromium.org/developers/adding-3rd-party-libraries'
- print 'for more info how to handle the failure.'
- print
- print 'Please respect OWNERS of checklicenses.py. Changes violating'
- print 'this requirement may be reverted.'
-
- # Do not print unused suppressions so that above message is clearly
- # visible and gets proper attention. Too much unrelated output
- # would be distracting and make the important points easier to miss.
-
- return 1
-
-
- return 0
-
-
-def main():
- default_root = os.path.abspath(
- os.path.join(os.path.dirname(__file__), '..'))
- option_parser = optparse.OptionParser()
- option_parser.add_option('--root', default=default_root,
- dest='base_directory',
- help='Specifies the repository root. This defaults '
- "to '..' relative to the script file, which "
- 'will normally be the repository root.')
- options, args = option_parser.parse_args()
-
- target_directory = None
- if len(args) == 1:
- target_directory = args[0]
- elif len(args) > 1:
- PrintUsage()
- return 1
- results = check_licenses(options.base_directory, target_directory)
- if not results:
- print 'SUCCESS'
- return results
-
-
-if '__main__' == __name__:
- sys.exit(main())
diff --git a/src/tools/google_trace_viewer/build/presubmit_checks.py b/src/tools/google_trace_viewer/build/presubmit_checks.py
deleted file mode 100755
index 6469b717..00000000
--- a/src/tools/google_trace_viewer/build/presubmit_checks.py
+++ /dev/null
@@ -1,215 +0,0 @@
-# Copyright (c) 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import os
-import re
-import sys
-import time
-
-import checklicenses
-
-def _FormatError(msg, files):
- return ('%s in these files:\n' % msg +
- '\n'.join([' ' + x for x in files])
- )
-
-def _ReportErrorFileAndLine(filename, line_num, dummy_line):
- """Default error formatter for _FindNewViolationsOfRule."""
- return '%s:%s' % (filename, line_num)
-
-def _FindNewViolationsOfRule(callable_rule, input_api,
- error_formatter=_ReportErrorFileAndLine):
- """Find all newly introduced violations of a per-line rule (a callable).
-
- Arguments:
- callable_rule: a callable taking a file extension and line of input and
- returning True if the rule is satisfied and False if there was a problem.
- input_api: object to enumerate the affected files.
- source_file_filter: a filter to be passed to the input api.
- error_formatter: a callable taking (filename, line_number, line) and
- returning a formatted error string.
-
- Returns:
- A list of the newly-introduced violations reported by the rule.
- """
- errors = []
- for f in input_api.AffectedFiles(include_deletes=False):
- # For speed, we do two passes, checking first the full file. Shelling out
- # to the SCM to determine the changed region can be quite expensive on
- # Win32. Assuming that most files will be kept problem-free, we can
- # skip the SCM operations most of the time.
- extension = str(f.filename).rsplit('.', 1)[-1]
- if all(callable_rule(extension, line) for line in f.contents_as_lines):
- continue # No violation found in full text: can skip considering diff.
-
- if input_api.IsIgnoredFile(f):
- continue
-
- for line_num, line in f.changed_lines:
- if not callable_rule(extension, line):
- errors.append(error_formatter(f.filename, line_num, line))
-
- return errors
-
-def CheckCopyright(input_api):
- results = []
- results += _CheckCopyrightThirdParty(input_api)
- results += _CheckCopyrightNonThirdParty(input_api)
- return results
-
-def _CheckCopyrightThirdParty(input_api):
- results = []
- has_third_party_change = any(
- input_api.IsThirdParty(f)
- for f in input_api.AffectedFiles(include_deletes=False))
- if has_third_party_change:
- tracing_root = os.path.abspath(
- os.path.join(os.path.dirname(__file__), '..'))
- tracing_third_party = os.path.join(tracing_root, 'tracing', 'third_party')
- has_invalid_license = checklicenses.check_licenses(
- tracing_root, tracing_third_party)
- if has_invalid_license:
- results.append(
- 'License check encountered invalid licenses in tracing/third_party/.')
- return results
-
-def _CheckCopyrightNonThirdParty(input_api):
- project_name = 'Chromium'
-
- current_year = int(time.strftime('%Y'))
- allow_old_years=True
- if allow_old_years:
- allowed_years = (str(s) for s in reversed(xrange(2006, current_year + 1)))
- else:
- allowed_years = [str(current_year)]
- years_re = '(' + '|'.join(allowed_years) + ')'
-
- # The (c) is deprecated, but tolerate it until it's removed from all files.
- non_html_license_header = (
- r'.*? Copyright (\(c\) )?%(year)s The %(project)s Authors\. '
- r'All rights reserved\.\n'
- r'.*? Use of this source code is governed by a BSD-style license that '
- r'can be\n'
- r'.*? found in the LICENSE file\.(?: \*/)?\n'
- ) % {
- 'year': years_re,
- 'project': project_name,
- }
- non_html_license_re = re.compile(non_html_license_header, re.MULTILINE)
-
- html_license_header = (
- r'^Copyright (\(c\) )?%(year)s The %(project)s Authors\. '
- r'All rights reserved\.\n'
- r'Use of this source code is governed by a BSD-style license that '
- r'can be\n'
- r'found in the LICENSE file\.(?: \*/)?\n'
- ) % {
- 'year': years_re,
- 'project': project_name,
- }
- html_license_re = re.compile(html_license_header, re.MULTILINE)
-
- sources = list(s for s in input_api.AffectedFiles(include_deletes=False)
- if not input_api.IsThirdParty(s))
-
- html_sources = [f for f in sources
- if os.path.splitext(f.filename)[1] == '.html']
- non_html_sources = [f for f in sources
- if os.path.splitext(f.filename)[1] != '.html']
-
- results = []
- results += _Check(input_api, html_license_re, html_sources)
- results += _Check(input_api, non_html_license_re, non_html_sources)
- return results
-
-def _Check(input_api, license_re, sources):
- bad_files = []
- for f in sources:
- if input_api.IsIgnoredFile(f):
- continue
- contents = f.contents
- if not license_re.search(contents):
- bad_files.append(f.filename)
- if bad_files:
- return [_FormatError(
- 'License must match:\n%s\n' % license_re.pattern +
- 'Found a bad license header',
- bad_files)]
- return []
-
-def CheckLongLines(input_api, maxlen=80):
- """Checks that there aren't any lines longer than maxlen characters in any of
- the text files to be submitted.
- """
- maxlens = {
- '': maxlen,
- }
-
- # Language specific exceptions to max line length.
- # '.h' is considered an obj-c file extension, since OBJC_EXCEPTIONS are a
- # superset of CPP_EXCEPTIONS.
- CPP_FILE_EXTS = ('c', 'cc')
- CPP_EXCEPTIONS = ('#define', '#endif', '#if', '#include', '#pragma')
- JAVA_FILE_EXTS = ('java',)
- JAVA_EXCEPTIONS = ('import ', 'package ')
- OBJC_FILE_EXTS = ('h', 'm', 'mm')
- OBJC_EXCEPTIONS = ('#define', '#endif', '#if', '#import', '#include',
- '#pragma')
-
- LANGUAGE_EXCEPTIONS = [
- (CPP_FILE_EXTS, CPP_EXCEPTIONS),
- (JAVA_FILE_EXTS, JAVA_EXCEPTIONS),
- (OBJC_FILE_EXTS, OBJC_EXCEPTIONS),
- ]
-
- def no_long_lines(file_extension, line):
- # Check for language specific exceptions.
- if any(file_extension in exts and line.startswith(exceptions)
- for exts, exceptions in LANGUAGE_EXCEPTIONS):
- return True
-
- file_maxlen = maxlens.get(file_extension, maxlens[''])
- # Stupidly long symbols need to be worked around if they take 66% of the line.
- long_symbol = file_maxlen * 2 / 3
- # Hard line length limit at 50% more.
- extra_maxlen = file_maxlen * 3 / 2
-
- line_len = len(line)
- if line_len <= file_maxlen:
- return True
-
- if '@suppress longLineCheck' in line:
- return True
-
- if line_len > extra_maxlen:
- return False
-
- if any((url in line) for url in ('file://', 'http://', 'https://')):
- return True
-
- if 'url(' in line and file_extension == 'css':
- return True
-
- if '<include' in line and file_extension in ('css', 'html', 'js'):
- return True
- """
- return self._depot_tools_affected_file.ChangedContents()
-
-
-class TvInputAPI(object):
- """Thin wrapper around InputAPI class from depot_tools.
-
- See tools/depot_tools/presubmit_support.py in the Chromium tree.
- """
- # TODO(petrcermak): Get rid of this class and use the wrapped object directly
- # (https://github.com/google/trace-viewer/issues/932).
- def __init__(self, depot_tools_input_api):
- self._depot_tools_input_api = depot_tools_input_api
-
- def AffectedFiles(self, *args, **kwargs):
- return map(_AffectedFile,
- self._depot_tools_input_api.AffectedFiles(*args, **kwargs))
-
- def IsIgnoredFile(self, affected_file):
- if affected_file.filename.endswith('.png'):
- return True
-
- if affected_file.filename.endswith('.svg'):
- return True
-
- if affected_file.filename.endswith('.skp'):
- return True
-
- if (affected_file.filename.endswith('.gypi') or
- affected_file.filename.endswith('.gyp') or
- affected_file.filename.endswith('.gn')):
- return True
-
- if self.IsThirdParty(affected_file):
- return True
-
- # Is test data?
- test_data_path = tracing_project.TracingProject.test_data_path
- if affected_file.absolute_path.startswith(test_data_path):
- return True
-
- if (affected_file.filename.startswith('.gitignore') or
- affected_file.filename.startswith('codereview.settings') or
- affected_file.filename.startswith('tracing/.allow-devtools-save') or
- affected_file.filename.startswith('tracing/AUTHORS') or
- affected_file.filename.startswith('tracing/LICENSE') or
- affected_file.filename.startswith('tracing/OWNERS') or
- affected_file.filename.startswith('tracing/bower.json') or
- affected_file.filename.startswith('tracing/.gitignore') or
- affected_file.filename.startswith('tracing/.bowerrc') or
- affected_file.filename.startswith('tracing/README.md') or
- affected_file.filename.startswith(
- 'tracing/examples/string_convert.js')):
- return True
-
- return False
-
- def IsThirdParty(self, affected_file):
- return affected_file.filename.startswith('tracing/third_party')
diff --git a/src/tools/google_trace_viewer/hooks/__init__.py b/src/tools/google_trace_viewer/hooks/__init__.py
deleted file mode 100755
index 3ee068de..00000000
--- a/src/tools/google_trace_viewer/hooks/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-# Copyright (c) 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import os
-import sys
diff --git a/src/tools/google_trace_viewer/hooks/install.py b/src/tools/google_trace_viewer/hooks/install.py
deleted file mode 100755
index aa41f3d3..00000000
--- a/src/tools/google_trace_viewer/hooks/install.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import sys
-import os
-
-_TOP_PATH = os.path.abspath(os.path.join(
- os.path.dirname(__file__), '..'))
-
-class Link(object):
- def __init__(self, dst_path, src_path):
- self.dst_path = dst_path
- self.src_path = src_path
-
- def Update(self):
- full_src_path = os.path.join(_TOP_PATH, self.src_path)
- full_dst_path = os.path.join(_TOP_PATH, self.dst_path)
-
- full_dst_path_dirname = os.path.dirname(full_dst_path)
-
- src_path_rel = os.path.relpath(full_src_path, full_dst_path_dirname)
-
- assert os.path.exists(full_src_path)
- if not os.path.exists(full_dst_path_dirname):
- sys.stdout.write('ERROR\n\n')
- sys.stdout.write(' dst dir %s doesn\'t exist\n' % full_dst_path_dirname)
- sys.stdout.write('\n\n')
- sys.exit(255)
-
- if os.path.exists(full_dst_path) or os.path.islink(full_dst_path):
- if not os.path.islink(full_dst_path):
- sys.stdout.write('ERROR\n\n')
- sys.stdout.write(' Cannot install %s, dst already exists:\n %s\n' % (
- os.path.basename(self.src_path), full_dst_path))
- sys.stdout.write('\n\n')
- sys.exit(255)
-
- existing_src_path_rel = os.readlink(full_dst_path)
- if existing_src_path_rel == src_path_rel:
- return
- else:
- sys.stdout.write('ERROR\n\n')
- sys.stdout.write(' Cannot install %s, because %s is linked elsewhere.\n' % (
- os.path.basename(self.src_path),
- os.path.relpath(full_dst_path)))
- sys.stdout.write('\n\n')
- sys.exit(255)
-
- os.symlink(src_path_rel, full_dst_path)
-
-def InstallHooks():
- if sys.platform == 'win32':
- return
-
- # Remove old pre-commit, see https://github.com/google/trace-viewer/issues/932
- old_precommit = os.path.join(_TOP_PATH, '.git', 'hooks', 'pre-commit')
- old_precommit_target = os.path.join(_TOP_PATH, 'hooks', 'pre_commit')
- if (os.path.islink(old_precommit) and
- os.path.abspath(os.readlink(old_precommit)) == old_precommit_target):
- os.remove(old_precommit)
-
- links = []
- links.append(Link(os.path.join('.git', 'hooks', 'pre-push'),
- os.path.join('hooks/pre_push')))
-
- for l in links:
- l.Update()
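install.py's `Link.Update` creates relative symlinks and refuses to clobber anything it did not create itself. The essential idempotency check, sketched in JavaScript to keep one language across the added examples (paths are illustrative):

```js
'use strict';

const fs = require('fs');
const path = require('path');

// Link dstPath -> srcPath relatively. Silently succeed when the link is
// already in place; refuse to overwrite anything else.
function installLink(srcPath, dstPath) {
  const relTarget = path.relative(path.dirname(dstPath), srcPath);

  let existing = null;
  try {
    existing = fs.readlinkSync(dstPath); // throws if absent or not a symlink
  } catch (e) {
    if (fs.existsSync(dstPath)) {
      throw new Error(`Cannot install: ${dstPath} exists and is not a symlink`);
    }
  }

  if (existing === relTarget) {
    return; // already installed
  }

  if (existing !== null) {
    throw new Error(`Cannot install: ${dstPath} is linked elsewhere`);
  }

  fs.symlinkSync(relTarget, dstPath);
}
```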
diff --git a/src/tools/google_trace_viewer/hooks/pre_push b/src/tools/google_trace_viewer/hooks/pre_push
deleted file mode 100755
index 7e7a2961..00000000
--- a/src/tools/google_trace_viewer/hooks/pre_push
+++ /dev/null
@@ -1,52 +0,0 @@
-#!/usr/bin/env python
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import sys
-import subprocess
-
-"""Detect forced pushes (on the client) and prompt the user before going on."""
-
-def read_from_tty():
- try:
- import posix # No way to do this on Windows, just give up there.
- with open('/dev/tty') as tty_fd:
- return tty_fd.readline().strip()
- except:
- return None
-
-
-def Main():
- # Allow force pushes in repos forked elsewhere (e.g. googlesource).
- remote_url = sys.argv[2] if len(sys.argv) > 2 else ''
- if 'github.com' not in remote_url:
- return 0
-
- parts = sys.stdin.readline().split()
- if len(parts) < 4:
- return 0
- local_ref, local_sha, remote_ref, remote_sha = parts
- cmd = ['git', 'rev-list', '--count', remote_sha, '--not', local_sha,
- '--max-count=1']
-
- is_force_push = '0'
- try:
- is_force_push = subprocess.check_output(cmd).strip()
- except subprocess.CalledProcessError:
- return 0
-
- if is_force_push != '0':
- sys.stderr.write('\033[31mWARNING: Force pushing will break the ' +
- 'github.com -> googlesource.com mirroring.\033[0m\n' +
- 'This is almost certainly a bad idea.\n')
-
- sys.stderr.write('Type y to continue: ')
- if read_from_tty() != 'y':
- return 1
-
- return 0
-
-
-if __name__ == '__main__':
- sys.exit(Main())
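The deleted pre-push hook flags a force push by asking git whether the remote head would become unreachable: `git rev-list --count <remote_sha> --not <local_sha>` is non-zero exactly when the remote has commits the local branch does not contain. The same check, sketched in JavaScript (the SHA arguments are placeholders; in the real hook git supplies them on stdin):

```js
'use strict';

const { execFileSync } = require('child_process');

// True when remoteSha has commits that are not ancestors of localSha,
// i.e. pushing localSha would rewrite published history.
function isForcePush(remoteSha, localSha) {
  try {
    const count = execFileSync(
      'git',
      ['rev-list', '--count', remoteSha, '--not', localSha, '--max-count=1'],
      { encoding: 'utf8' }
    ).trim();

    return count !== '0';
  } catch (e) {
    return false; // mirror the hook: on git errors, let the push proceed
  }
}
```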
diff --git a/src/tools/google_trace_viewer/perf_insights/README.md b/src/tools/google_trace_viewer/perf_insights/README.md
deleted file mode 100755
index 56ccaf1d..00000000
--- a/src/tools/google_trace_viewer/perf_insights/README.md
+++ /dev/null
@@ -1,7 +0,0 @@
-
-This folder contains code for http://performance-insights.appspot.com/,
-and related client scripts. This is our UI for bulk-processing of trace data in
-order to understand performance issues in Chrome, at scale.
\ No newline at end of file
diff --git a/src/tools/google_trace_viewer/perf_insights/__init__.py b/src/tools/google_trace_viewer/perf_insights/__init__.py
deleted file mode 100755
index 6464f9db..00000000
--- a/src/tools/google_trace_viewer/perf_insights/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# Copyright (c) 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import os
\ No newline at end of file
diff --git a/src/tools/google_trace_viewer/perf_insights/app.yaml b/src/tools/google_trace_viewer/perf_insights/app.yaml
deleted file mode 100755
index 061114e1..00000000
--- a/src/tools/google_trace_viewer/perf_insights/app.yaml
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright (c) 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-application: performance-insights
-
-version: 1
-api_version: 1
-runtime: python27
-threadsafe: true
-
-builtins:
-- remote_api: on
-
-handlers:
-- url: /upload
- script: perf_insights.upload.app
- secure: always
diff --git a/src/tools/google_trace_viewer/perf_insights/bin/__init__.py b/src/tools/google_trace_viewer/perf_insights/bin/__init__.py
deleted file mode 100755
index 6464f9db..00000000
--- a/src/tools/google_trace_viewer/perf_insights/bin/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# Copyright (c) 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import os
\ No newline at end of file
diff --git a/src/tools/google_trace_viewer/perf_insights/bin/map_traces b/src/tools/google_trace_viewer/perf_insights/bin/map_traces
deleted file mode 100755
index 355724d7..00000000
--- a/src/tools/google_trace_viewer/perf_insights/bin/map_traces
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-
-if __name__ == '__main__':
- top_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
- sys.path.append(top_dir)
- from perf_insights.bin import map_traces
- sys.exit(map_traces.Main(sys.argv[1:]))
diff --git a/src/tools/google_trace_viewer/perf_insights/bin/map_traces.py b/src/tools/google_trace_viewer/perf_insights/bin/map_traces.py
deleted file mode 100755
index a0ea9e5b..00000000
--- a/src/tools/google_trace_viewer/perf_insights/bin/map_traces.py
+++ /dev/null
@@ -1,6 +0,0 @@
-# Copyright (c) 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-def Main(args):
- pass
diff --git a/src/tools/google_trace_viewer/perf_insights/bin/wr b/src/tools/google_trace_viewer/perf_insights/bin/wr
deleted file mode 100755
index e9c28cf1..00000000
--- a/src/tools/google_trace_viewer/perf_insights/bin/wr
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/usr/bin/env python
-# Copyright (c) 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-
-if __name__ == '__main__':
- top_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
- sys.path.append(top_dir)
- from perf_insights.bin import wr
- sys.exit(wr.Main(sys.argv[1:]))
diff --git a/src/tools/google_trace_viewer/perf_insights/bin/wr.py b/src/tools/google_trace_viewer/perf_insights/bin/wr.py
deleted file mode 100755
index a0ea9e5b..00000000
--- a/src/tools/google_trace_viewer/perf_insights/bin/wr.py
+++ /dev/null
@@ -1,6 +0,0 @@
-# Copyright (c) 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-def Main(args):
- pass
diff --git a/src/tools/google_trace_viewer/perf_insights/index.yaml b/src/tools/google_trace_viewer/perf_insights/index.yaml
deleted file mode 100755
index e96f2643..00000000
--- a/src/tools/google_trace_viewer/perf_insights/index.yaml
+++ /dev/null
@@ -1,24 +0,0 @@
-# Copyright (c) 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-indexes:
-
-- kind: TraceInfo
- properties:
- - name: prod
- - name: date
- direction: desc
-
-- kind: TraceInfo
- properties:
- - name: ver
- - name: date
- direction: desc
-
-- kind: TraceInfo
- properties:
- - name: prod
- - name: ver
- - name: date
- direction: desc
diff --git a/src/tools/google_trace_viewer/perf_insights/perf_insights/__init__.py b/src/tools/google_trace_viewer/perf_insights/perf_insights/__init__.py
deleted file mode 100755
index 6464f9db..00000000
--- a/src/tools/google_trace_viewer/perf_insights/perf_insights/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
-# Copyright (c) 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import os
\ No newline at end of file
diff --git a/src/tools/google_trace_viewer/perf_insights/perf_insights/trace_info.py b/src/tools/google_trace_viewer/perf_insights/perf_insights/trace_info.py
deleted file mode 100755
index 079836c8..00000000
--- a/src/tools/google_trace_viewer/perf_insights/perf_insights/trace_info.py
+++ /dev/null
@@ -1,12 +0,0 @@
-# Copyright (c) 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-from google.appengine.ext import ndb
-
-
-class TraceInfo(ndb.Model):
- prod = ndb.StringProperty(indexed=True)
- ver = ndb.StringProperty(indexed=True)
- remote_addr = ndb.StringProperty(indexed=True)
- date = ndb.DateTimeProperty(auto_now_add=True, indexed=True)
diff --git a/src/tools/google_trace_viewer/perf_insights/perf_insights/upload.py b/src/tools/google_trace_viewer/perf_insights/perf_insights/upload.py
deleted file mode 100755
index 29b21abb..00000000
--- a/src/tools/google_trace_viewer/perf_insights/perf_insights/upload.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright (c) 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-import webapp2
-import uuid
-
-from perf_insights import trace_info
-
-sys.path.append('third_party')
-import cloudstorage as gcs
-
-default_retry_params = gcs.RetryParams(initial_delay=0.2,
- max_delay=5.0,
- backoff_factor=2,
- max_retry_period=15)
-gcs.set_default_retry_params(default_retry_params)
-
-
-class UploadPage(webapp2.RequestHandler):
-
- def get(self):
- self.response.out.write("""
- <html><body>
- <head><title>Performance Insights - Trace Uploader</title></head>
- <form action="/upload" enctype="multipart/form-data" method="post">
- <div><input type="file" name="trace"/></div>
- <div><input type="submit" value="Upload"></div>
- </form>
- </body></html>""")
-
- def post(self):
- trace_uuid = str(uuid.uuid4())
- bucket_name = ('/performance-insights/' + trace_uuid)
- gcs_file = gcs.open(bucket_name,
- 'w',
- content_type='application/octet-stream',
- options={},
- retry_params=default_retry_params)
- gcs_file.write(self.request.get('trace'))
- gcs_file.close()
-
- trace_object = trace_info.TraceInfo(id=trace_uuid)
- trace_object.prod = self.request.get('prod')
- trace_object.ver = self.request.get('product_version')
- trace_object.remote_addr = os.environ["REMOTE_ADDR"]
- trace_object.put()
-
-app = webapp2.WSGIApplication([('/upload', UploadPage)])
diff --git a/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/COPYING b/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/COPYING
deleted file mode 100755
index b09cd785..00000000
--- a/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/COPYING
+++ /dev/null
@@ -1,201 +0,0 @@
-Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/README.chromium b/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/README.chromium
deleted file mode 100755
index fe387323..00000000
--- a/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/README.chromium
+++ /dev/null
@@ -1,12 +0,0 @@
-Name: Google Cloud Storage Client Library
-URL: https://github.com/GoogleCloudPlatform/appengine-gcs-client
-License: Apache 2.0
-
-Description:
-The Google Cloud Storage client library is a client-side library that is not
-dependent on any specific version of App Engine for production use.
-
-Modifications:
-
-
-Full license is in the COPYING file.
diff --git a/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/__init__.py b/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/__init__.py
deleted file mode 100755
index 349a021a..00000000
--- a/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/__init__.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2014 Google Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
-# either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-"""Client Library for Google Cloud Storage."""
-
-
-
-
-from .api_utils import RetryParams
-from .api_utils import set_default_retry_params
-from cloudstorage_api import *
-from .common import CSFileStat
-from .common import GCSFileStat
-from .common import validate_bucket_name
-from .common import validate_bucket_path
-from .common import validate_file_path
-from errors import *
-from storage_api import *
diff --git a/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/api_utils.py b/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/api_utils.py
deleted file mode 100755
index 680ac6bc..00000000
--- a/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/api_utils.py
+++ /dev/null
@@ -1,353 +0,0 @@
-# Copyright 2013 Google Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
-# either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-"""Util functions and classes for cloudstorage_api."""
-
-
-
-__all__ = ['set_default_retry_params',
- 'RetryParams',
- ]
-
-import copy
-import httplib
-import logging
-import math
-import os
-import threading
-import time
-import urllib
-
-
-try:
- from google.appengine.api import app_identity
- from google.appengine.api import urlfetch
- from google.appengine.datastore import datastore_rpc
- from google.appengine.ext import ndb
- from google.appengine.ext.ndb import eventloop
- from google.appengine.ext.ndb import tasklets
- from google.appengine.ext.ndb import utils
- from google.appengine import runtime
- from google.appengine.runtime import apiproxy_errors
-except ImportError:
- from google.appengine.api import app_identity
- from google.appengine.api import urlfetch
- from google.appengine.datastore import datastore_rpc
- from google.appengine import runtime
- from google.appengine.runtime import apiproxy_errors
- from google.appengine.ext import ndb
- from google.appengine.ext.ndb import eventloop
- from google.appengine.ext.ndb import tasklets
- from google.appengine.ext.ndb import utils
-
-
-_RETRIABLE_EXCEPTIONS = (urlfetch.DownloadError,
- apiproxy_errors.Error,
- app_identity.InternalError,
- app_identity.BackendDeadlineExceeded)
-
-_thread_local_settings = threading.local()
-_thread_local_settings.default_retry_params = None
-
-
-def set_default_retry_params(retry_params):
- """Set a default RetryParams for current thread current request."""
- _thread_local_settings.default_retry_params = copy.copy(retry_params)
-
-
-def _get_default_retry_params():
- """Get default RetryParams for current request and current thread.
-
- Returns:
- A new instance of the default RetryParams.
- """
- default = getattr(_thread_local_settings, 'default_retry_params', None)
- if default is None or not default.belong_to_current_request():
- return RetryParams()
- else:
- return copy.copy(default)
-
-
-def _quote_filename(filename):
- """Quotes filename to use as a valid URI path.
-
- Args:
- filename: user provided filename. /bucket/filename.
-
- Returns:
- The filename properly quoted to use as URI's path component.
- """
- return urllib.quote(filename)
-
-
-def _unquote_filename(filename):
- """Unquotes a valid URI path back to its filename.
-
- This is the opposite of _quote_filename.
-
- Args:
- filename: a quoted filename. /bucket/some%20filename.
-
- Returns:
- The filename unquoted.
- """
- return urllib.unquote(filename)
-
-
-def _should_retry(resp):
- """Given a urlfetch response, decide whether to retry that request."""
- return (resp.status_code == httplib.REQUEST_TIMEOUT or
- (resp.status_code >= 500 and
- resp.status_code < 600))
-
-
-class _RetryWrapper(object):
- """A wrapper that wraps retry logic around any tasklet."""
-
- def __init__(self,
- retry_params,
- retriable_exceptions=_RETRIABLE_EXCEPTIONS,
- should_retry=lambda r: False):
- """Init.
-
- Args:
- retry_params: an RetryParams instance.
- retriable_exceptions: a list of exception classes that are retriable.
- should_retry: a function that takes a result from the tasklet and returns
- a boolean. True if the result should be retried.
- """
- self.retry_params = retry_params
- self.retriable_exceptions = retriable_exceptions
- self.should_retry = should_retry
-
- @ndb.tasklet
- def run(self, tasklet, **kwds):
- """Run a tasklet with retry.
-
- The retry should be transparent to the caller: if no results
- are successful, the exception or result from the last retry is returned
- to the caller.
-
- Args:
- tasklet: the tasklet to run.
- **kwds: keywords arguments to run the tasklet.
-
- Raises:
- The exception from running the tasklet.
-
- Returns:
- The result from running the tasklet.
- """
- start_time = time.time()
- n = 1
-
- while True:
- e = None
- result = None
- got_result = False
-
- try:
- result = yield tasklet(**kwds)
- got_result = True
- if not self.should_retry(result):
- raise ndb.Return(result)
- except runtime.DeadlineExceededError:
- logging.debug(
- 'Tasklet has exceeded request deadline after %s seconds total',
- time.time() - start_time)
- raise
- except self.retriable_exceptions, e:
- pass
-
- if n == 1:
- logging.debug('Tasklet is %r', tasklet)
-
- delay = self.retry_params.delay(n, start_time)
-
- if delay <= 0:
- logging.debug(
- 'Tasklet failed after %s attempts and %s seconds in total',
- n, time.time() - start_time)
- if got_result:
- raise ndb.Return(result)
- elif e is not None:
- raise e
- else:
- assert False, 'Should never reach here.'
-
- if got_result:
- logging.debug(
- 'Got result %r from tasklet.', result)
- else:
- logging.debug(
- 'Got exception "%r" from tasklet.', e)
- logging.debug('Retry in %s seconds.', delay)
- n += 1
- yield tasklets.sleep(delay)
-
-
-class RetryParams(object):
- """Retry configuration parameters."""
-
- _DEFAULT_USER_AGENT = 'App Engine Python GCS Client'
-
- @datastore_rpc._positional(1)
- def __init__(self,
- backoff_factor=2.0,
- initial_delay=0.1,
- max_delay=10.0,
- min_retries=3,
- max_retries=6,
- max_retry_period=30.0,
- urlfetch_timeout=None,
- save_access_token=False,
- _user_agent=None):
- """Init.
-
- This object is unique per request per thread.
-
- Library will retry according to this setting when App Engine Server
- can't call urlfetch, urlfetch timed out, or urlfetch got a 408 or
- 500-600 response.
-
- Args:
- backoff_factor: exponential backoff multiplier.
- initial_delay: seconds to delay for the first retry.
- max_delay: max seconds to delay for every retry.
- min_retries: min number of times to retry. This value is automatically
- capped by max_retries.
- max_retries: max number of times to retry. Set this to 0 for no retry.
- max_retry_period: max total seconds spent on retry. Retry stops when
- this period passed AND min_retries has been attempted.
- urlfetch_timeout: timeout for urlfetch in seconds. Could be None,
- in which case the value will be chosen by urlfetch module.
- save_access_token: persist access token to datastore to avoid
- excessive usage of GetAccessToken API. Usually the token is cached
- in process and in memcache. In some cases, memcache isn't very
- reliable.
- _user_agent: The user agent string that you want to use in your requests.
- """
- self.backoff_factor = self._check('backoff_factor', backoff_factor)
- self.initial_delay = self._check('initial_delay', initial_delay)
- self.max_delay = self._check('max_delay', max_delay)
- self.max_retry_period = self._check('max_retry_period', max_retry_period)
- self.max_retries = self._check('max_retries', max_retries, True, int)
- self.min_retries = self._check('min_retries', min_retries, True, int)
- if self.min_retries > self.max_retries:
- self.min_retries = self.max_retries
-
- self.urlfetch_timeout = None
- if urlfetch_timeout is not None:
- self.urlfetch_timeout = self._check('urlfetch_timeout', urlfetch_timeout)
- self.save_access_token = self._check('save_access_token', save_access_token,
- True, bool)
- self._user_agent = _user_agent or self._DEFAULT_USER_AGENT
-
- self._request_id = os.getenv('REQUEST_LOG_ID')
-
- def __eq__(self, other):
- if not isinstance(other, self.__class__):
- return False
- return self.__dict__ == other.__dict__
-
- def __ne__(self, other):
- return not self.__eq__(other)
-
- @classmethod
- def _check(cls, name, val, can_be_zero=False, val_type=float):
- """Check init arguments.
-
- Args:
- name: name of the argument. For logging purpose.
- val: value. Value has to be a non-negative number.
- can_be_zero: whether value can be zero.
- val_type: Python type of the value.
-
- Returns:
- The value.
-
- Raises:
- ValueError: when invalid value is passed in.
- TypeError: when invalid value type is passed in.
- """
- valid_types = [val_type]
- if val_type is float:
- valid_types.append(int)
-
- if type(val) not in valid_types:
- raise TypeError(
- 'Expect type %s for parameter %s' % (val_type.__name__, name))
- if val < 0:
- raise ValueError(
- 'Value for parameter %s can not be negative' % name)
- if not can_be_zero and val == 0:
- raise ValueError(
- 'Value for parameter %s can not be 0' % name)
- return val
-
- def belong_to_current_request(self):
- return os.getenv('REQUEST_LOG_ID') == self._request_id
-
- def delay(self, n, start_time):
- """Calculate delay before the next retry.
-
- Args:
- n: the number of the current attempt. The first attempt should be 1.
- start_time: the time when retry started in unix time.
-
- Returns:
- Number of seconds to wait before next retry. -1 if retry should give up.
- """
- if (n > self.max_retries or
- (n > self.min_retries and
- time.time() - start_time > self.max_retry_period)):
- return -1
- return min(
- math.pow(self.backoff_factor, n-1) * self.initial_delay,
- self.max_delay)
-
-
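
The schedule implemented by delay() above is plain exponential backoff capped at max_delay. As a sketch of the arithmetic only (not part of the deleted module), the default parameters produce the following delays while within max_retry_period:

  import math

  def example_delays(backoff_factor=2.0, initial_delay=0.1,
                     max_delay=10.0, max_retries=6):
      # Mirrors min(backoff_factor**(n-1) * initial_delay, max_delay)
      # from RetryParams.delay(), ignoring the wall-clock cutoff.
      return [min(math.pow(backoff_factor, n - 1) * initial_delay, max_delay)
              for n in range(1, max_retries + 1)]

  print example_delays()  # [0.1, 0.2, 0.4, 0.8, 1.6, 3.2]
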
-def _run_until_rpc():
- """Eagerly evaluate tasklets until it is blocking on some RPC.
-
- Usually ndb eventloop el isn't run until some code calls future.get_result().
-
- When an async tasklet is called, the tasklet wrapper evaluates the tasklet
- code into a generator, enqueues a callback _help_tasklet_along onto
- the el.current queue, and returns a future.
-
- _help_tasklet_along, when called by the el, will
- get one yielded value from the generator. If the value is another future,
- it sets up a callback _on_future_complete to invoke _help_tasklet_along
- when the dependent future fulfills. If the value is an RPC, it sets up a
- callback _on_rpc_complete to invoke _help_tasklet_along when the RPC fulfills.
- Thus _help_tasklet_along drills down
- the chain of futures until some future is blocked by an RPC. El runs
- all callbacks and constantly checks pending RPC status.
- """
- el = eventloop.get_event_loop()
- while el.current:
- el.run0()
-
-
-def _eager_tasklet(tasklet):
- """Decorator to turn tasklet to run eagerly."""
-
- @utils.wrapping(tasklet)
- def eager_wrapper(*args, **kwds):
- fut = tasklet(*args, **kwds)
- _run_until_rpc()
- return fut
-
- return eager_wrapper
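
A minimal usage sketch (names hypothetical): applying _eager_tasklet on top of ndb.tasklet, the way storage_api decorates do_request_async below, makes the RPC start as soon as the function is called rather than when get_result() is first awaited.

  from google.appengine.ext import ndb

  @_eager_tasklet
  @ndb.tasklet
  def fetch_status(url):
      ctx = ndb.get_context()
      resp = yield ctx.urlfetch(url)
      raise ndb.Return(resp.status_code)

  fut = fetch_status('http://example.com/')  # urlfetch RPC already in flight
  status = fut.get_result()                  # only blocks for completion
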
diff --git a/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/cloudstorage_api.py b/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/cloudstorage_api.py
deleted file mode 100755
index ba8be862..00000000
--- a/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/cloudstorage_api.py
+++ /dev/null
@@ -1,451 +0,0 @@
-# Copyright 2012 Google Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
-# either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-"""File Interface for Google Cloud Storage."""
-
-
-
-from __future__ import with_statement
-
-
-
-__all__ = ['delete',
- 'listbucket',
- 'open',
- 'stat',
- ]
-
-import logging
-import StringIO
-import urllib
-import xml.etree.cElementTree as ET
-from . import api_utils
-from . import common
-from . import errors
-from . import storage_api
-
-
-
-def open(filename,
- mode='r',
- content_type=None,
- options=None,
- read_buffer_size=storage_api.ReadBuffer.DEFAULT_BUFFER_SIZE,
- retry_params=None,
- _account_id=None):
- """Opens a Google Cloud Storage file and returns it as a File-like object.
-
- Args:
- filename: A Google Cloud Storage filename of form '/bucket/filename'.
- mode: 'r' for reading mode. 'w' for writing mode.
- In reading mode, the file must exist. In writing mode, a file will
- be created or overwritten.
- content_type: The MIME type of the file. str. Only valid in writing mode.
- options: A str->basestring dict to specify additional headers to pass to
- GCS e.g. {'x-goog-acl': 'private', 'x-goog-meta-foo': 'foo'}.
- Supported options are x-goog-acl, x-goog-meta-, cache-control,
- content-disposition, and content-encoding.
- Only valid in writing mode.
- See https://developers.google.com/storage/docs/reference-headers
- for details.
- read_buffer_size: The buffer size for read. Read keeps a buffer
- and prefetches another one. To minimize blocking for large files,
- always read by buffer size. To minimize number of RPC requests for
- small files, set a large buffer size. Max is 30MB.
- retry_params: An instance of api_utils.RetryParams for subsequent calls
- to GCS from this file handle. If None, the default one is used.
- _account_id: Internal-use only.
-
- Returns:
- A reading or writing buffer that supports File-like interface. Buffer
- must be closed after operations are done.
-
- Raises:
- errors.AuthorizationError: if authorization failed.
- errors.NotFoundError: if an object that's expected to exist doesn't.
- ValueError: invalid open mode or if content_type or options are specified
- in reading mode.
- """
- common.validate_file_path(filename)
- api = storage_api._get_storage_api(retry_params=retry_params,
- account_id=_account_id)
- filename = api_utils._quote_filename(filename)
-
- if mode == 'w':
- common.validate_options(options)
- return storage_api.StreamingBuffer(api, filename, content_type, options)
- elif mode == 'r':
- if content_type or options:
- raise ValueError('Options and content_type can only be specified '
- 'for writing mode.')
- return storage_api.ReadBuffer(api,
- filename,
- buffer_size=read_buffer_size)
- else:
- raise ValueError('Invalid mode %s.' % mode)
-
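
For orientation, a minimal read/write sketch against the interface above, assuming the package is imported as cloudstorage and '/my_bucket/demo.txt' is a placeholder path:

  import cloudstorage as gcs

  with gcs.open('/my_bucket/demo.txt', 'w', content_type='text/plain',
                options={'x-goog-meta-foo': 'foo'}) as f:
      f.write('hello gcs\n')

  with gcs.open('/my_bucket/demo.txt') as f:  # mode defaults to 'r'
      print f.read()
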
-
-def delete(filename, retry_params=None, _account_id=None):
- """Delete a Google Cloud Storage file.
-
- Args:
- filename: A Google Cloud Storage filename of form '/bucket/filename'.
- retry_params: An api_utils.RetryParams for this call to GCS. If None,
- the default one is used.
- _account_id: Internal-use only.
-
- Raises:
- errors.NotFoundError: if the file doesn't exist prior to deletion.
- """
- api = storage_api._get_storage_api(retry_params=retry_params,
- account_id=_account_id)
- common.validate_file_path(filename)
- filename = api_utils._quote_filename(filename)
- status, resp_headers, content = api.delete_object(filename)
- errors.check_status(status, [204], filename, resp_headers=resp_headers,
- body=content)
-
-
-def stat(filename, retry_params=None, _account_id=None):
- """Get GCSFileStat of a Google Cloud storage file.
-
- Args:
- filename: A Google Cloud Storage filename of form '/bucket/filename'.
- retry_params: An api_utils.RetryParams for this call to GCS. If None,
- the default one is used.
- _account_id: Internal-use only.
-
- Returns:
- a GCSFileStat object containing info about this file.
-
- Raises:
- errors.AuthorizationError: if authorization failed.
- errors.NotFoundError: if an object that's expected to exist doesn't.
- """
- common.validate_file_path(filename)
- api = storage_api._get_storage_api(retry_params=retry_params,
- account_id=_account_id)
- status, headers, content = api.head_object(
- api_utils._quote_filename(filename))
- errors.check_status(status, [200], filename, resp_headers=headers,
- body=content)
- file_stat = common.GCSFileStat(
- filename=filename,
- st_size=common.get_stored_content_length(headers),
- st_ctime=common.http_time_to_posix(headers.get('last-modified')),
- etag=headers.get('etag'),
- content_type=headers.get('content-type'),
- metadata=common.get_metadata(headers))
-
- return file_stat
-
-
-def _copy2(src, dst, metadata=None, retry_params=None):
- """Copy the file content from src to dst.
-
- Internal use only!
-
- Args:
- src: /bucket/filename
- dst: /bucket/filename
- metadata: a dict of metadata for this copy. If None, old metadata is copied.
- For example, {'x-goog-meta-foo': 'bar'}.
- retry_params: An api_utils.RetryParams for this call to GCS. If None,
- the default one is used.
-
- Raises:
- errors.AuthorizationError: if authorization failed.
- errors.NotFoundError: if an object that's expected to exist doesn't.
- """
- common.validate_file_path(src)
- common.validate_file_path(dst)
-
- if metadata is None:
- metadata = {}
- copy_meta = 'COPY'
- else:
- copy_meta = 'REPLACE'
- metadata.update({'x-goog-copy-source': src,
- 'x-goog-metadata-directive': copy_meta})
-
- api = storage_api._get_storage_api(retry_params=retry_params)
- status, resp_headers, content = api.put_object(
- api_utils._quote_filename(dst), headers=metadata)
- errors.check_status(status, [200], src, metadata, resp_headers, body=content)
-
-
-def listbucket(path_prefix, marker=None, prefix=None, max_keys=None,
- delimiter=None, retry_params=None, _account_id=None):
- """Returns a GCSFileStat iterator over a bucket.
-
- Optional arguments can limit the result to a subset of files under bucket.
-
- This function has two modes:
- 1. List bucket mode: Lists all files in the bucket without any concept of
- hierarchy. GCS doesn't have real directory hierarchies.
- 2. Directory emulation mode: If you specify the 'delimiter' argument,
- it is used as a path separator to emulate a hierarchy of directories.
- In this mode, the "path_prefix" argument should end in the delimiter
- specified (thus designates a logical directory). The logical directory's
- contents, both files and subdirectories, are listed. The names of
- subdirectories returned will end with the delimiter. So listbucket
- can be called with the subdirectory name to list the subdirectory's
- contents.
-
- Args:
- path_prefix: A Google Cloud Storage path of format "/bucket" or
- "/bucket/prefix". Only objects whose fullpath starts with the
- path_prefix will be returned.
- marker: Another path prefix. Only objects whose fullpath starts
- lexicographically after marker will be returned (exclusive).
- prefix: Deprecated. Use path_prefix.
- max_keys: The limit on the number of objects to return. int.
- For best performance, specify max_keys only if you know how many objects
- you want. Otherwise, this method requests large batches and handles
- pagination for you.
- delimiter: Use to turn on directory mode. str of one or multiple chars
- that your bucket uses as its directory separator.
- retry_params: An api_utils.RetryParams for this call to GCS. If None,
- the default one is used.
- _account_id: Internal-use only.
-
- Examples:
- For files "/bucket/a",
- "/bucket/bar/1"
- "/bucket/foo",
- "/bucket/foo/1", "/bucket/foo/2/1", "/bucket/foo/3/1",
-
- Regular mode:
- listbucket("/bucket/f", marker="/bucket/foo/1")
- will match "/bucket/foo/2/1", "/bucket/foo/3/1".
-
- Directory mode:
- listbucket("/bucket/", delimiter="/")
- will match "/bucket/a, "/bucket/bar/" "/bucket/foo", "/bucket/foo/".
- listbucket("/bucket/foo/", delimiter="/")
- will match "/bucket/foo/1", "/bucket/foo/2/", "/bucket/foo/3/"
-
- Returns:
- Regular mode:
- A GCSFileStat iterator over matched files ordered by filename.
- The iterator returns GCSFileStat objects. filename, etag, st_size,
- st_ctime, and is_dir are set.
-
- Directory emulation mode:
- A GCSFileStat iterator over matched files and directories ordered by
- name. The iterator returns GCSFileStat objects. For directories,
- only the filename and is_dir fields are set.
-
- The last name yielded can be used as next call's marker.
- """
- if prefix:
- common.validate_bucket_path(path_prefix)
- bucket = path_prefix
- else:
- bucket, prefix = common._process_path_prefix(path_prefix)
-
- if marker and marker.startswith(bucket):
- marker = marker[len(bucket) + 1:]
-
- api = storage_api._get_storage_api(retry_params=retry_params,
- account_id=_account_id)
- options = {}
- if marker:
- options['marker'] = marker
- if max_keys:
- options['max-keys'] = max_keys
- if prefix:
- options['prefix'] = prefix
- if delimiter:
- options['delimiter'] = delimiter
-
- return _Bucket(api, bucket, options)
-
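
A short iteration sketch in directory-emulation mode (bucket name hypothetical), matching the docstring's examples above:

  import cloudstorage as gcs

  for stat in gcs.listbucket('/my_bucket/', delimiter='/'):
      if stat.is_dir:
          print 'dir: ', stat.filename          # name ends with the delimiter
      else:
          print 'file:', stat.filename, stat.st_size
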
-
-class _Bucket(object):
- """A wrapper for a GCS bucket as the return value of listbucket."""
-
- def __init__(self, api, path, options):
- """Initialize.
-
- Args:
- api: storage_api instance.
- path: bucket path of form '/bucket'.
- options: a dict of listbucket options. Please see listbucket doc.
- """
- self._init(api, path, options)
-
- def _init(self, api, path, options):
- self._api = api
- self._path = path
- self._options = options.copy()
- self._get_bucket_fut = self._api.get_bucket_async(
- self._path + '?' + urllib.urlencode(self._options))
- self._last_yield = None
- self._new_max_keys = self._options.get('max-keys')
-
- def __getstate__(self):
- options = self._options
- if self._last_yield:
- options['marker'] = self._last_yield.filename[len(self._path) + 1:]
- if self._new_max_keys is not None:
- options['max-keys'] = self._new_max_keys
- return {'api': self._api,
- 'path': self._path,
- 'options': options}
-
- def __setstate__(self, state):
- self._init(state['api'], state['path'], state['options'])
-
- def __iter__(self):
- """Iter over the bucket.
-
- Yields:
- GCSFileStat: a GCSFileStat for an object in the bucket.
- They are ordered by GCSFileStat.filename.
- """
- total = 0
- max_keys = self._options.get('max-keys')
-
- while self._get_bucket_fut:
- status, resp_headers, content = self._get_bucket_fut.get_result()
- errors.check_status(status, [200], self._path, resp_headers=resp_headers,
- body=content, extras=self._options)
-
- if self._should_get_another_batch(content):
- self._get_bucket_fut = self._api.get_bucket_async(
- self._path + '?' + urllib.urlencode(self._options))
- else:
- self._get_bucket_fut = None
-
- root = ET.fromstring(content)
- dirs = self._next_dir_gen(root)
- files = self._next_file_gen(root)
- next_file = files.next()
- next_dir = dirs.next()
-
- while ((max_keys is None or total < max_keys) and
- not (next_file is None and next_dir is None)):
- total += 1
- if next_file is None:
- self._last_yield = next_dir
- next_dir = dirs.next()
- elif next_dir is None:
- self._last_yield = next_file
- next_file = files.next()
- elif next_dir < next_file:
- self._last_yield = next_dir
- next_dir = dirs.next()
- elif next_file < next_dir:
- self._last_yield = next_file
- next_file = files.next()
- else:
- logging.error(
- 'Should never reach. next file is %r. next dir is %r.',
- next_file, next_dir)
- if self._new_max_keys:
- self._new_max_keys -= 1
- yield self._last_yield
-
- def _next_file_gen(self, root):
- """Generator for next file element in the document.
-
- Args:
- root: root element of the XML tree.
-
- Yields:
- GCSFileStat for the next file.
- """
- for e in root.getiterator(common._T_CONTENTS):
- st_ctime, size, etag, key = None, None, None, None
- for child in e.getiterator('*'):
- if child.tag == common._T_LAST_MODIFIED:
- st_ctime = common.dt_str_to_posix(child.text)
- elif child.tag == common._T_ETAG:
- etag = child.text
- elif child.tag == common._T_SIZE:
- size = child.text
- elif child.tag == common._T_KEY:
- key = child.text
- yield common.GCSFileStat(self._path + '/' + key,
- size, etag, st_ctime)
- e.clear()
- yield None
-
- def _next_dir_gen(self, root):
- """Generator for next directory element in the document.
-
- Args:
- root: root element in the XML tree.
-
- Yields:
- GCSFileStat for the next directory.
- """
- for e in root.getiterator(common._T_COMMON_PREFIXES):
- yield common.GCSFileStat(
- self._path + '/' + e.find(common._T_PREFIX).text,
- st_size=None, etag=None, st_ctime=None, is_dir=True)
- e.clear()
- yield None
-
- def _should_get_another_batch(self, content):
- """Whether to issue another GET bucket call.
-
- Args:
- content: response XML.
-
- Returns:
- True if another batch should be fetched; self._options is also
- updated for the next request. False otherwise.
- """
- if ('max-keys' in self._options and
- self._options['max-keys'] <= common._MAX_GET_BUCKET_RESULT):
- return False
-
- elements = self._find_elements(
- content, set([common._T_IS_TRUNCATED,
- common._T_NEXT_MARKER]))
- if elements.get(common._T_IS_TRUNCATED, 'false').lower() != 'true':
- return False
-
- next_marker = elements.get(common._T_NEXT_MARKER)
- if next_marker is None:
- self._options.pop('marker', None)
- return False
- self._options['marker'] = next_marker
- return True
-
- def _find_elements(self, result, elements):
- """Find interesting elements from XML.
-
- This function tries to only look for specified elements
- without parsing the entire XML. This works best when the specified
- elements are located near the beginning.
-
- Args:
- result: response XML.
- elements: a set of interesting element tags.
-
- Returns:
- A dict from element tag to element value.
- """
- element_mapping = {}
- result = StringIO.StringIO(result)
- for _, e in ET.iterparse(result, events=('end',)):
- if not elements:
- break
- if e.tag in elements:
- element_mapping[e.tag] = e.text
- elements.remove(e.tag)
- return element_mapping
diff --git a/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/common.py b/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/common.py
deleted file mode 100755
index ab9c8df3..00000000
--- a/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/common.py
+++ /dev/null
@@ -1,429 +0,0 @@
-# Copyright 2012 Google Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
-# either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-"""Helpers shared by cloudstorage_stub and cloudstorage_api."""
-
-
-
-
-
-__all__ = ['CS_XML_NS',
- 'CSFileStat',
- 'dt_str_to_posix',
- 'local_api_url',
- 'LOCAL_GCS_ENDPOINT',
- 'local_run',
- 'get_access_token',
- 'get_stored_content_length',
- 'get_metadata',
- 'GCSFileStat',
- 'http_time_to_posix',
- 'memory_usage',
- 'posix_time_to_http',
- 'posix_to_dt_str',
- 'set_access_token',
- 'validate_options',
- 'validate_bucket_name',
- 'validate_bucket_path',
- 'validate_file_path',
- ]
-
-
-import calendar
-import datetime
-from email import utils as email_utils
-import logging
-import os
-import re
-
-try:
- from google.appengine.api import runtime
-except ImportError:
- from google.appengine.api import runtime
-
-
-_GCS_BUCKET_REGEX_BASE = r'[a-z0-9\.\-_]{3,63}'
-_GCS_BUCKET_REGEX = re.compile(_GCS_BUCKET_REGEX_BASE + r'$')
-_GCS_BUCKET_PATH_REGEX = re.compile(r'/' + _GCS_BUCKET_REGEX_BASE + r'$')
-_GCS_PATH_PREFIX_REGEX = re.compile(r'/' + _GCS_BUCKET_REGEX_BASE + r'.*')
-_GCS_FULLPATH_REGEX = re.compile(r'/' + _GCS_BUCKET_REGEX_BASE + r'/.*')
-_GCS_METADATA = ['x-goog-meta-',
- 'content-disposition',
- 'cache-control',
- 'content-encoding']
-_GCS_OPTIONS = _GCS_METADATA + ['x-goog-acl']
-CS_XML_NS = 'http://doc.s3.amazonaws.com/2006-03-01'
-LOCAL_GCS_ENDPOINT = '/_ah/gcs'
-_access_token = ''
-
-
-_MAX_GET_BUCKET_RESULT = 1000
-
-
-def set_access_token(access_token):
- """Set the shared access token to authenticate with Google Cloud Storage.
-
- When set, the library will always attempt to communicate with the
- real Google Cloud Storage with this token even when running on dev appserver.
- Note the token could expire so it's up to you to renew it.
-
- When absent, the library will automatically request and refresh a token
- on appserver, or when on dev appserver, talk to a Google Cloud Storage
- stub.
-
- Args:
- access_token: you can get one by running 'gsutil -d ls' and copying the
- str after 'Bearer'.
- """
- global _access_token
- _access_token = access_token
-
-
-def get_access_token():
- """Returns the shared access token."""
- return _access_token
-
-
-class GCSFileStat(object):
- """Container for GCS file stat."""
-
- def __init__(self,
- filename,
- st_size,
- etag,
- st_ctime,
- content_type=None,
- metadata=None,
- is_dir=False):
- """Initialize.
-
- For files, the non-optional arguments are always set.
- For directories, only filename and is_dir are set.
-
- Args:
- filename: a Google Cloud Storage filename of form '/bucket/filename'.
- st_size: file size in bytes. long compatible.
- etag: hex digest of the md5 hash of the file's content. str.
- st_ctime: posix file creation time. float compatible.
- content_type: content type. str.
- metadata: a str->str dict of user specified options when creating
- the file. Possible keys are x-goog-meta-, content-disposition,
- content-encoding, and cache-control.
- is_dir: True if this represents a directory. False if this is a real file.
- """
- self.filename = filename
- self.is_dir = is_dir
- self.st_size = None
- self.st_ctime = None
- self.etag = None
- self.content_type = content_type
- self.metadata = metadata
-
- if not is_dir:
- self.st_size = long(st_size)
- self.st_ctime = float(st_ctime)
- if etag[0] == '"' and etag[-1] == '"':
- etag = etag[1:-1]
- self.etag = etag
-
- def __repr__(self):
- if self.is_dir:
- return '(directory: %s)' % self.filename
-
- return (
- '(filename: %(filename)s, st_size: %(st_size)s, '
- 'st_ctime: %(st_ctime)s, etag: %(etag)s, '
- 'content_type: %(content_type)s, '
- 'metadata: %(metadata)s)' %
- dict(filename=self.filename,
- st_size=self.st_size,
- st_ctime=self.st_ctime,
- etag=self.etag,
- content_type=self.content_type,
- metadata=self.metadata))
-
- def __cmp__(self, other):
- if not isinstance(other, self.__class__):
- raise ValueError('Argument to cmp must have the same type. '
- 'Expect %s, got %s' % (self.__class__.__name__,
- other.__class__.__name__))
- if self.filename > other.filename:
- return 1
- elif self.filename < other.filename:
- return -1
- return 0
-
- def __hash__(self):
- if self.etag:
- return hash(self.etag)
- return hash(self.filename)
-
-
-CSFileStat = GCSFileStat
-
-
-def get_stored_content_length(headers):
- """Return the content length (in bytes) of the object as stored in GCS.
-
- x-goog-stored-content-length should always be present except when called via
- the local dev_appserver. Therefore if it is not present we default to the
- standard content-length header.
-
- Args:
- headers: a dict of headers from the http response.
-
- Returns:
- the stored content length.
- """
- length = headers.get('x-goog-stored-content-length')
- if length is None:
- length = headers.get('content-length')
- return length
-
-
-def get_metadata(headers):
- """Get user defined options from HTTP response headers."""
- return dict((k, v) for k, v in headers.iteritems()
- if any(k.lower().startswith(valid) for valid in _GCS_METADATA))
-
-
-def validate_bucket_name(name):
- """Validate a Google Storage bucket name.
-
- Args:
- name: a Google Storage bucket name with no prefix or suffix.
-
- Raises:
- ValueError: if name is invalid.
- """
- _validate_path(name)
- if not _GCS_BUCKET_REGEX.match(name):
- raise ValueError('Bucket should be 3-63 characters long using only a-z,'
- '0-9, underscore, dash or dot but got %s' % name)
-
-
-def validate_bucket_path(path):
- """Validate a Google Cloud Storage bucket path.
-
- Args:
- path: a Google Storage bucket path. It should have form '/bucket'.
-
- Raises:
- ValueError: if path is invalid.
- """
- _validate_path(path)
- if not _GCS_BUCKET_PATH_REGEX.match(path):
- raise ValueError('Bucket should have format /bucket '
- 'but got %s' % path)
-
-
-def validate_file_path(path):
- """Validate a Google Cloud Storage file path.
-
- Args:
- path: a Google Storage file path. It should have form '/bucket/filename'.
-
- Raises:
- ValueError: if path is invalid.
- """
- _validate_path(path)
- if not _GCS_FULLPATH_REGEX.match(path):
- raise ValueError('Path should have format /bucket/filename '
- 'but got %s' % path)
-
-
-def _process_path_prefix(path_prefix):
- """Validate and process a Google Cloud Stoarge path prefix.
-
- Args:
- path_prefix: a Google Cloud Storage path prefix of format '/bucket/prefix'
- or '/bucket/' or '/bucket'.
-
- Raises:
- ValueError: if path is invalid.
-
- Returns:
- a tuple of /bucket and prefix. prefix can be None.
- """
- _validate_path(path_prefix)
- if not _GCS_PATH_PREFIX_REGEX.match(path_prefix):
- raise ValueError('Path prefix should have format /bucket, /bucket/, '
- 'or /bucket/prefix but got %s.' % path_prefix)
- bucket_name_end = path_prefix.find('/', 1)
- bucket = path_prefix
- prefix = None
- if bucket_name_end != -1:
- bucket = path_prefix[:bucket_name_end]
- prefix = path_prefix[bucket_name_end + 1:] or None
- return bucket, prefix
-
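
Concretely, the split above behaves like this sketch:

  print _process_path_prefix('/bucket')       # ('/bucket', None)
  print _process_path_prefix('/bucket/')      # ('/bucket', None)
  print _process_path_prefix('/bucket/a/b')   # ('/bucket', 'a/b')
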
-
-def _validate_path(path):
- """Basic validation of Google Storage paths.
-
- Args:
- path: a Google Storage path. It should have form '/bucket/filename'
- or '/bucket'.
-
- Raises:
- ValueError: if path is invalid.
- TypeError: if path is not of type basestring.
- """
- if not path:
- raise ValueError('Path is empty')
- if not isinstance(path, basestring):
- raise TypeError('Path should be a string but is %s (%s).' %
- (path.__class__, path))
-
-
-def validate_options(options):
- """Validate Google Cloud Storage options.
-
- Args:
- options: a str->basestring dict of options to pass to Google Cloud Storage.
-
- Raises:
- ValueError: if option is not supported.
- TypeError: if option is not of type str or value of an option
- is not of type basestring.
- """
- if not options:
- return
-
- for k, v in options.iteritems():
- if not isinstance(k, str):
- raise TypeError('option %r should be a str.' % k)
- if not any(k.lower().startswith(valid) for valid in _GCS_OPTIONS):
- raise ValueError('option %s is not supported.' % k)
- if not isinstance(v, basestring):
- raise TypeError('value %r for option %s should be of type basestring.' %
- (v, k))
-
-
-def http_time_to_posix(http_time):
- """Convert HTTP time format to posix time.
-
- See http://www.w3.org/Protocols/rfc2616/rfc2616-sec3.html#sec3.3.1
- for http time format.
-
- Args:
- http_time: time in RFC 2616 format. e.g.
- "Mon, 20 Nov 1995 19:12:08 GMT".
-
- Returns:
- A float of secs from unix epoch.
- """
- if http_time is not None:
- return email_utils.mktime_tz(email_utils.parsedate_tz(http_time))
-
-
-def posix_time_to_http(posix_time):
- """Convert posix time to HTML header time format.
-
- Args:
- posix_time: unix time.
-
- Returns:
- A datetime str in RFC 2616 format.
- """
- if posix_time:
- return email_utils.formatdate(posix_time, usegmt=True)
-
-
-_DT_FORMAT = '%Y-%m-%dT%H:%M:%S'
-
-
-def dt_str_to_posix(dt_str):
- """format str to posix.
-
- datetime str is of format %Y-%m-%dT%H:%M:%S.%fZ,
- e.g. 2013-04-12T00:22:27.978Z. According to ISO 8601, T is a separator
- between date and time when they are on the same line.
- Z indicates UTC (zero meridian).
-
- A pointer: http://www.cl.cam.ac.uk/~mgk25/iso-time.html
-
- This is used to parse LastModified node from GCS's GET bucket XML response.
-
- Args:
- dt_str: A datetime str.
-
- Returns:
- A float of secs from unix epoch. By posix definition, epoch is midnight
- 1970/1/1 UTC.
- """
- parsable, _ = dt_str.split('.')
- dt = datetime.datetime.strptime(parsable, _DT_FORMAT)
- return calendar.timegm(dt.utctimetuple())
-
-
-def posix_to_dt_str(posix):
- """Reverse of str_to_datetime.
-
- This is used by GCS stub to generate GET bucket XML response.
-
- Args:
- posix: A float of secs from unix epoch.
-
- Returns:
- A datetime str.
- """
- dt = datetime.datetime.utcfromtimestamp(posix)
- dt_str = dt.strftime(_DT_FORMAT)
- return dt_str + '.000Z'
-
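
A round-trip sketch of the two converters; note that fractional seconds are dropped on parse and re-emitted as '.000':

  posix = dt_str_to_posix('2013-04-12T00:22:27.978Z')
  print posix                    # 1365726147
  print posix_to_dt_str(posix)   # 2013-04-12T00:22:27.000Z
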
-
-def local_run():
- """Whether we should hit GCS dev appserver stub."""
- server_software = os.environ.get('SERVER_SOFTWARE')
- if server_software is None:
- return True
- if 'remote_api' in server_software:
- return False
- if server_software.startswith(('Development', 'testutil')):
- return True
- return False
-
-
-def local_api_url():
- """Return URL for GCS emulation on dev appserver."""
- return 'http://%s%s' % (os.environ.get('HTTP_HOST'), LOCAL_GCS_ENDPOINT)
-
-
-def memory_usage(method):
- """Log memory usage before and after a method."""
- def wrapper(*args, **kwargs):
- logging.info('Memory before method %s is %s.',
- method.__name__, runtime.memory_usage().current())
- result = method(*args, **kwargs)
- logging.info('Memory after method %s is %s',
- method.__name__, runtime.memory_usage().current())
- return result
- return wrapper
-
-
-def _add_ns(tagname):
- return '{%(ns)s}%(tag)s' % {'ns': CS_XML_NS,
- 'tag': tagname}
-
-
-_T_CONTENTS = _add_ns('Contents')
-_T_LAST_MODIFIED = _add_ns('LastModified')
-_T_ETAG = _add_ns('ETag')
-_T_KEY = _add_ns('Key')
-_T_SIZE = _add_ns('Size')
-_T_PREFIX = _add_ns('Prefix')
-_T_COMMON_PREFIXES = _add_ns('CommonPrefixes')
-_T_NEXT_MARKER = _add_ns('NextMarker')
-_T_IS_TRUNCATED = _add_ns('IsTruncated')
diff --git a/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/errors.py b/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/errors.py
deleted file mode 100755
index 21743806..00000000
--- a/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/errors.py
+++ /dev/null
@@ -1,143 +0,0 @@
-# Copyright 2012 Google Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
-# either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-"""Google Cloud Storage specific Files API calls."""
-
-
-
-
-
-__all__ = ['AuthorizationError',
- 'check_status',
- 'Error',
- 'FatalError',
- 'FileClosedError',
- 'ForbiddenError',
- 'InvalidRange',
- 'NotFoundError',
- 'ServerError',
- 'TimeoutError',
- 'TransientError',
- ]
-
-import httplib
-
-
-class Error(Exception):
- """Base error for all gcs operations.
-
- Error can happen on GAE side or GCS server side.
- For details on a particular GCS HTTP response code, see
- https://developers.google.com/storage/docs/reference-status#standardcodes
- """
-
-
-class TransientError(Error):
- """TransientError could be retried."""
-
-
-class TimeoutError(TransientError):
- """HTTP 408 timeout."""
-
-
-class FatalError(Error):
- """FatalError shouldn't be retried."""
-
-
-class FileClosedError(FatalError):
- """File is already closed.
-
- This can happen when the upload has finished but 'write' is called on
- a stale upload handle.
- """
-
-
-class NotFoundError(FatalError):
- """HTTP 404 resource not found."""
-
-
-class ForbiddenError(FatalError):
- """HTTP 403 Forbidden.
-
- While GCS replies with a 403 error for many reasons, the most common one
- is bucket permissions not being set up correctly for your app.
- """
-
-
-class AuthorizationError(FatalError):
- """HTTP 401 authentication required.
-
- Unauthorized request has been received by GCS.
-
- This error is mostly handled by GCS client. GCS client will request
- a new access token and retry the request.
- """
-
-
-class InvalidRange(FatalError):
- """HTTP 416 RequestRangeNotSatifiable."""
-
-
-class ServerError(TransientError):
- """HTTP >= 500 server side error."""
-
-
-def check_status(status, expected, path, headers=None,
- resp_headers=None, body=None, extras=None):
- """Check HTTP response status is expected.
-
- Args:
- status: HTTP response status. int.
- expected: a list of expected statuses. A list of ints.
- path: filename or a path prefix.
- headers: HTTP request headers.
- resp_headers: HTTP response headers.
- body: HTTP response body.
- extras: extra info to be logged verbatim if error occurs.
-
- Raises:
- AuthorizationError: if authorization failed.
- NotFoundError: if an object that's expected to exist doesn't.
- TimeoutError: if HTTP request timed out.
- ServerError: if server experienced some errors.
- FatalError: if any other unexpected errors occurred.
- """
- if status in expected:
- return
-
- msg = ('Expect status %r from Google Storage. But got status %d.\n'
- 'Path: %r.\n'
- 'Request headers: %r.\n'
- 'Response headers: %r.\n'
- 'Body: %r.\n'
- 'Extra info: %r.\n' %
- (expected, status, path, headers, resp_headers, body, extras))
-
- if status == httplib.UNAUTHORIZED:
- raise AuthorizationError(msg)
- elif status == httplib.FORBIDDEN:
- raise ForbiddenError(msg)
- elif status == httplib.NOT_FOUND:
- raise NotFoundError(msg)
- elif status == httplib.REQUEST_TIMEOUT:
- raise TimeoutError(msg)
- elif status == httplib.REQUESTED_RANGE_NOT_SATISFIABLE:
- raise InvalidRange(msg)
- elif (status == httplib.OK and 308 in expected and
- httplib.OK not in expected):
- raise FileClosedError(msg)
- elif status >= 500:
- raise ServerError(msg)
- else:
- raise FatalError(msg)
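
A sketch of the mapping in practice (status and path hypothetical): a 404 against an expected [200] surfaces as NotFoundError.

  try:
      check_status(404, [200], '/my_bucket/missing.txt')
  except NotFoundError, e:
      print 'not found: %s' % e
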
diff --git a/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/rest_api.py b/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/rest_api.py
deleted file mode 100755
index 437c09d7..00000000
--- a/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/rest_api.py
+++ /dev/null
@@ -1,258 +0,0 @@
-# Copyright 2012 Google Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
-# either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-"""Base and helper classes for Google RESTful APIs."""
-
-
-
-
-
-__all__ = ['add_sync_methods']
-
-import random
-import time
-
-from . import api_utils
-
-try:
- from google.appengine.api import app_identity
- from google.appengine.ext import ndb
-except ImportError:
- from google.appengine.api import app_identity
- from google.appengine.ext import ndb
-
-
-
-def _make_sync_method(name):
- """Helper to synthesize a synchronous method from an async method name.
-
- Used by the @add_sync_methods class decorator below.
-
- Args:
- name: The name of the synchronous method.
-
- Returns:
- A method (with first argument 'self') that retrieves and calls
- self.<name>, passing its own arguments, expects it to return a
- Future, and then waits for and returns that Future's result.
- """
-
- def sync_wrapper(self, *args, **kwds):
- method = getattr(self, name)
- future = method(*args, **kwds)
- return future.get_result()
-
- return sync_wrapper
-
-
-def add_sync_methods(cls):
- """Class decorator to add synchronous methods corresponding to async methods.
-
- This modifies the class in place, adding additional methods to it.
- If a synchronous method of a given name already exists it is not
- replaced.
-
- Args:
- cls: A class.
-
- Returns:
- The same class, modified in place.
- """
- for name in cls.__dict__.keys():
- if name.endswith('_async'):
- sync_name = name[:-6]
- if not hasattr(cls, sync_name):
- setattr(cls, sync_name, _make_sync_method(name))
- return cls
-
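
A minimal sketch of the synthesis: given a get_async that returns an ndb Future, add_sync_methods adds a blocking get that waits on it.

  from google.appengine.ext import ndb

  class Client(object):
      @ndb.tasklet
      def get_async(self, key):
          raise ndb.Return(key.upper())  # stand-in for real async work

  Client = add_sync_methods(Client)

  print Client().get('abc')  # 'ABC'; same as get_async('abc').get_result()
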
-
-class _AE_TokenStorage_(ndb.Model):
- """Entity to store app_identity tokens in memcache."""
-
- token = ndb.StringProperty()
- expires = ndb.FloatProperty()
-
-
-@ndb.tasklet
-def _make_token_async(scopes, service_account_id):
- """Get a fresh authentication token.
-
- Args:
- scopes: A list of scopes.
- service_account_id: Internal-use only.
-
- Raises:
- An ndb.Return with a tuple (token, expiration_time) where expiration_time is
- seconds since the epoch.
- """
- rpc = app_identity.create_rpc()
- app_identity.make_get_access_token_call(rpc, scopes, service_account_id)
- token, expires_at = yield rpc
- raise ndb.Return((token, expires_at))
-
-
-class _RestApi(object):
- """Base class for REST-based API wrapper classes.
-
- This class manages authentication tokens and request retries. All
- APIs are available as synchronous and async methods; synchronous
- methods are synthesized from async ones by the add_sync_methods()
- function in this module.
-
- WARNING: Do NOT directly use this api. It's an implementation detail
- and is subject to change at any release.
- """
-
- def __init__(self, scopes, service_account_id=None, token_maker=None,
- retry_params=None):
- """Constructor.
-
- Args:
- scopes: A scope or a list of scopes.
- service_account_id: Internal use only.
- token_maker: An asynchronous function of the form
- (scopes, service_account_id) -> (token, expires).
- retry_params: An instance of api_utils.RetryParams. If None, the
- default for current thread will be used.
- """
-
- if isinstance(scopes, basestring):
- scopes = [scopes]
- self.scopes = scopes
- self.service_account_id = service_account_id
- self.make_token_async = token_maker or _make_token_async
- if not retry_params:
- retry_params = api_utils._get_default_retry_params()
- self.retry_params = retry_params
- self.user_agent = {'User-Agent': retry_params._user_agent}
- self.expiration_headroom = random.randint(60, 240)
-
- def __getstate__(self):
- """Store state as part of serialization/pickling."""
- return {'scopes': self.scopes,
- 'id': self.service_account_id,
- 'a_maker': (None if self.make_token_async == _make_token_async
- else self.make_token_async),
- 'retry_params': self.retry_params,
- 'expiration_headroom': self.expiration_headroom}
-
- def __setstate__(self, state):
- """Restore state as part of deserialization/unpickling."""
- self.__init__(state['scopes'],
- service_account_id=state['id'],
- token_maker=state['a_maker'],
- retry_params=state['retry_params'])
- self.expiration_headroom = state['expiration_headroom']
-
- @ndb.tasklet
- def do_request_async(self, url, method='GET', headers=None, payload=None,
- deadline=None, callback=None):
- """Issue one HTTP request.
-
- It performs async retries using tasklets.
-
- Args:
- url: the url to fetch.
- method: the method in which to fetch.
- headers: the http headers.
- payload: the data to submit in the fetch.
- deadline: the deadline in which to make the call.
- callback: the call to make once completed.
-
- Yields:
- The async fetch of the url.
- """
- retry_wrapper = api_utils._RetryWrapper(
- self.retry_params,
- retriable_exceptions=api_utils._RETRIABLE_EXCEPTIONS,
- should_retry=api_utils._should_retry)
- resp = yield retry_wrapper.run(
- self.urlfetch_async,
- url=url,
- method=method,
- headers=headers,
- payload=payload,
- deadline=deadline,
- callback=callback,
- follow_redirects=False)
- raise ndb.Return((resp.status_code, resp.headers, resp.content))
-
- @ndb.tasklet
- def get_token_async(self, refresh=False):
- """Get an authentication token.
-
- The token is cached in memcache, keyed by the scopes argument.
- Uses a random token expiration headroom value generated in the constructor
- to eliminate a burst of GET_ACCESS_TOKEN API requests.
-
- Args:
- refresh: If True, ignore a cached token; default False.
-
- Yields:
- An authentication token. This token is guaranteed to be non-expired.
- """
- key = '%s,%s' % (self.service_account_id, ','.join(self.scopes))
- ts = yield _AE_TokenStorage_.get_by_id_async(
- key, use_cache=True, use_memcache=True,
- use_datastore=self.retry_params.save_access_token)
- if refresh or ts is None or ts.expires < (
- time.time() + self.expiration_headroom):
- token, expires_at = yield self.make_token_async(
- self.scopes, self.service_account_id)
- timeout = int(expires_at - time.time())
- ts = _AE_TokenStorage_(id=key, token=token, expires=expires_at)
- if timeout > 0:
- yield ts.put_async(memcache_timeout=timeout,
- use_datastore=self.retry_params.save_access_token,
- use_cache=True, use_memcache=True)
- raise ndb.Return(ts.token)
-
- @ndb.tasklet
- def urlfetch_async(self, url, method='GET', headers=None,
- payload=None, deadline=None, callback=None,
- follow_redirects=False):
- """Make an async urlfetch() call.
-
- This is an async wrapper around urlfetch(). It adds an authentication
- header.
-
- Args:
- url: the url to fetch.
- method: the method in which to fetch.
- headers: the http headers.
- payload: the data to submit in the fetch.
- deadline: the deadline in which to make the call.
- callback: the call to make once completed.
- follow_redirects: whether or not to follow redirects.
-
- Yields:
- This returns a Future despite not being decorated with @ndb.tasklet!
- """
- headers = {} if headers is None else dict(headers)
- headers.update(self.user_agent)
- self.token = yield self.get_token_async()
- if self.token:
- headers['authorization'] = 'OAuth ' + self.token
-
- deadline = deadline or self.retry_params.urlfetch_timeout
-
- ctx = ndb.get_context()
- resp = yield ctx.urlfetch(
- url, payload=payload, method=method,
- headers=headers, follow_redirects=follow_redirects,
- deadline=deadline, callback=callback)
- raise ndb.Return(resp)
-
-
-_RestApi = add_sync_methods(_RestApi)
diff --git a/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/storage_api.py b/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/storage_api.py
deleted file mode 100755
index 910c365b..00000000
--- a/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/storage_api.py
+++ /dev/null
@@ -1,887 +0,0 @@
-# Copyright 2012 Google Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
-# either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-"""Python wrappers for the Google Storage RESTful API."""
-
-
-
-
-
-__all__ = ['ReadBuffer',
- 'StreamingBuffer',
- ]
-
-import collections
-import os
-import urlparse
-
-from . import api_utils
-from . import common
-from . import errors
-from . import rest_api
-
-try:
- from google.appengine.api import urlfetch
- from google.appengine.ext import ndb
-except ImportError:
- from google.appengine.api import urlfetch
- from google.appengine.ext import ndb
-
-
-
-def _get_storage_api(retry_params, account_id=None):
- """Returns storage_api instance for API methods.
-
- Args:
- retry_params: An instance of api_utils.RetryParams. If none,
- thread's default will be used.
- account_id: Internal-use only.
-
- Returns:
- A storage_api instance to handle urlfetch work to GCS.
- On dev appserver, this instance by default will talk to a local stub
- unless common.ACCESS_TOKEN is set. That token will be used to talk
- to the real GCS.
- """
-
-
- api = _StorageApi(_StorageApi.full_control_scope,
- service_account_id=account_id,
- retry_params=retry_params)
- if common.local_run() and not common.get_access_token():
- api.api_url = common.local_api_url()
- if common.get_access_token():
- api.token = common.get_access_token()
- return api
-
-
-class _StorageApi(rest_api._RestApi):
- """A simple wrapper for the Google Storage RESTful API.
-
- WARNING: Do NOT directly use this api. It's an implementation detail
- and is subject to change at any release.
-
- All async methods have similar args and returns.
-
- Args:
- path: The path to the Google Storage object or bucket, e.g.
- '/mybucket/myfile' or '/mybucket'.
- **kwd: Options for urlfetch. e.g.
- headers={'content-type': 'text/plain'}, payload='blah'.
-
- Returns:
- An ndb Future. When fulfilled, future.get_result() should return
- a tuple of (status, headers, content) that represents an HTTP response
- of the Google Cloud Storage XML API.
- """
-
- api_url = 'https://storage.googleapis.com'
- read_only_scope = 'https://www.googleapis.com/auth/devstorage.read_only'
- read_write_scope = 'https://www.googleapis.com/auth/devstorage.read_write'
- full_control_scope = 'https://www.googleapis.com/auth/devstorage.full_control'
-
- def __getstate__(self):
- """Store state as part of serialization/pickling.
-
- Returns:
- A tuple (of dictionaries) with the state of this object
- """
- return (super(_StorageApi, self).__getstate__(), {'api_url': self.api_url})
-
- def __setstate__(self, state):
- """Restore state as part of deserialization/unpickling.
-
- Args:
- state: the tuple from a __getstate__ call
- """
- superstate, localstate = state
- super(_StorageApi, self).__setstate__(superstate)
- self.api_url = localstate['api_url']
-
- @api_utils._eager_tasklet
- @ndb.tasklet
- def do_request_async(self, url, method='GET', headers=None, payload=None,
- deadline=None, callback=None):
- """Inherit docs.
-
- This method translates urlfetch exceptions to more service specific ones.
- """
- if headers is None:
- headers = {}
- if 'x-goog-api-version' not in headers:
- headers['x-goog-api-version'] = '2'
- headers['accept-encoding'] = 'gzip, *'
- try:
- resp_tuple = yield super(_StorageApi, self).do_request_async(
- url, method=method, headers=headers, payload=payload,
- deadline=deadline, callback=callback)
- except urlfetch.DownloadError, e:
- raise errors.TimeoutError(
- 'Request to Google Cloud Storage timed out.', e)
-
- raise ndb.Return(resp_tuple)
-
-
- def post_object_async(self, path, **kwds):
- """POST to an object."""
- return self.do_request_async(self.api_url + path, 'POST', **kwds)
-
- def put_object_async(self, path, **kwds):
- """PUT an object."""
- return self.do_request_async(self.api_url + path, 'PUT', **kwds)
-
- def get_object_async(self, path, **kwds):
- """GET an object.
-
- Note: No payload argument is supported.
- """
- return self.do_request_async(self.api_url + path, 'GET', **kwds)
-
- def delete_object_async(self, path, **kwds):
- """DELETE an object.
-
- Note: No payload argument is supported.
- """
- return self.do_request_async(self.api_url + path, 'DELETE', **kwds)
-
- def head_object_async(self, path, **kwds):
- """HEAD an object.
-
- Depending on request headers, HEAD returns various object properties,
- e.g. Content-Length, Last-Modified, and ETag.
-
- Note: No payload argument is supported.
- """
- return self.do_request_async(self.api_url + path, 'HEAD', **kwds)
-
- def get_bucket_async(self, path, **kwds):
- """GET a bucket."""
- return self.do_request_async(self.api_url + path, 'GET', **kwds)
-
-
-_StorageApi = rest_api.add_sync_methods(_StorageApi)
-
-
-class ReadBuffer(object):
- """A class for reading Google storage files."""
-
- DEFAULT_BUFFER_SIZE = 1024 * 1024
- MAX_REQUEST_SIZE = 30 * DEFAULT_BUFFER_SIZE
-
- def __init__(self,
- api,
- path,
- buffer_size=DEFAULT_BUFFER_SIZE,
- max_request_size=MAX_REQUEST_SIZE):
- """Constructor.
-
- Args:
- api: A StorageApi instance.
- path: Quoted/escaped path to the object, e.g. /mybucket/myfile
- buffer_size: buffer size. The ReadBuffer keeps
- one buffer. But there may be a pending future that contains
- a second buffer. This size must be less than max_request_size.
- max_request_size: Max bytes to request in one urlfetch.
- """
- self._api = api
- self._path = path
- self.name = api_utils._unquote_filename(path)
- self.closed = False
-
- assert buffer_size <= max_request_size
- self._buffer_size = buffer_size
- self._max_request_size = max_request_size
- self._offset = 0
- self._buffer = _Buffer()
- self._etag = None
-
- get_future = self._get_segment(0, self._buffer_size, check_response=False)
-
- status, headers, content = self._api.head_object(path)
- errors.check_status(status, [200], path, resp_headers=headers, body=content)
- self._file_size = long(common.get_stored_content_length(headers))
- self._check_etag(headers.get('etag'))
-
- self._buffer_future = None
-
- if self._file_size != 0:
- content, check_response_closure = get_future.get_result()
- check_response_closure()
- self._buffer.reset(content)
- self._request_next_buffer()
-
- def __getstate__(self):
- """Store state as part of serialization/pickling.
-
- The contents of the read buffer are not stored, only the current offset for
- data read by the client. A new read buffer is established at unpickling.
- The head information for the object (file size and etag) are stored to
- reduce startup and ensure the file has not changed.
-
- Returns:
- A dictionary with the state of this object
- """
- return {'api': self._api,
- 'path': self._path,
- 'buffer_size': self._buffer_size,
- 'request_size': self._max_request_size,
- 'etag': self._etag,
- 'size': self._file_size,
- 'offset': self._offset,
- 'closed': self.closed}
-
- def __setstate__(self, state):
- """Restore state as part of deserialization/unpickling.
-
- Args:
- state: the dictionary from a __getstate__ call
-
- Along with restoring the state, pre-fetch the next read buffer.
- """
- self._api = state['api']
- self._path = state['path']
- self.name = api_utils._unquote_filename(self._path)
- self._buffer_size = state['buffer_size']
- self._max_request_size = state['request_size']
- self._etag = state['etag']
- self._file_size = state['size']
- self._offset = state['offset']
- self._buffer = _Buffer()
- self.closed = state['closed']
- self._buffer_future = None
- if self._remaining() and not self.closed:
- self._request_next_buffer()
-
- def __iter__(self):
- """Iterator interface.
-
- Note the ReadBuffer container itself is the iterator. It's
- (quote PEP0234)
- 'destructive: they consume all the values and a second iterator
- cannot easily be created that iterates independently over the same values.
- You could open the file for the second time, or seek() to the beginning.'
-
- Returns:
- Self.
- """
- return self
-
- def next(self):
- line = self.readline()
- if not line:
- raise StopIteration()
- return line
-
- def readline(self, size=-1):
- """Read one line delimited by '\n' from the file.
-
- A trailing newline character is kept in the string. It may be absent when a
- file ends with an incomplete line. If the size argument is non-negative,
- it specifies the maximum string size (counting the newline) to return.
- A negative size is the same as unspecified. Empty string is returned
- only when EOF is encountered immediately.
-
- Args:
- size: Maximum number of bytes to read. If not specified, readline stops
- only on '\n' or EOF.
-
- Returns:
- The data read as a string.
-
- Raises:
- IOError: When this buffer is closed.
- """
- self._check_open()
- if size == 0 or not self._remaining():
- return ''
-
- data_list = []
- newline_offset = self._buffer.find_newline(size)
- while newline_offset < 0:
- data = self._buffer.read(size)
- size -= len(data)
- self._offset += len(data)
- data_list.append(data)
- if size == 0 or not self._remaining():
- return ''.join(data_list)
- self._buffer.reset(self._buffer_future.get_result())
- self._request_next_buffer()
- newline_offset = self._buffer.find_newline(size)
-
- data = self._buffer.read_to_offset(newline_offset + 1)
- self._offset += len(data)
- data_list.append(data)
-
- return ''.join(data_list)
-
- def read(self, size=-1):
- """Read data from RAW file.
-
- Args:
- size: Number of bytes to read as integer. Actual number of bytes
- read is always equal to size unless EOF is reached. If size is
- negative or unspecified, read the entire file.
-
- Returns:
- data read as str.
-
- Raises:
- IOError: When this buffer is closed.
- """
- self._check_open()
- if not self._remaining():
- return ''
-
- data_list = []
- while True:
- remaining = self._buffer.remaining()
- if size >= 0 and size < remaining:
- data_list.append(self._buffer.read(size))
- self._offset += size
- break
- else:
- size -= remaining
- self._offset += remaining
- data_list.append(self._buffer.read())
-
- if self._buffer_future is None:
- if size < 0 or size >= self._remaining():
- needs = self._remaining()
- else:
- needs = size
- data_list.extend(self._get_segments(self._offset, needs))
- self._offset += needs
- break
-
- if self._buffer_future:
- self._buffer.reset(self._buffer_future.get_result())
- self._buffer_future = None
-
- if self._buffer_future is None:
- self._request_next_buffer()
- return ''.join(data_list)
-
- def _remaining(self):
- return self._file_size - self._offset
-
- def _request_next_buffer(self):
- """Request next buffer.
-
- Requires that self._offset and self._buffer are in a consistent state.
- """
- self._buffer_future = None
- next_offset = self._offset + self._buffer.remaining()
- if next_offset != self._file_size:
- self._buffer_future = self._get_segment(next_offset,
- self._buffer_size)
-
- def _get_segments(self, start, request_size):
- """Get segments of the file from Google Storage as a list.
-
- A large request is broken into segments to avoid hitting urlfetch
- response size limit. Each segment is returned from a separate urlfetch.
-
- Args:
- start: start offset to request. Inclusive. Has to be within the
- range of the file.
- request_size: number of bytes to request.
-
- Returns:
- A list of file segments in order
- """
- if not request_size:
- return []
-
- end = start + request_size
- futures = []
-
- while request_size > self._max_request_size:
- futures.append(self._get_segment(start, self._max_request_size))
- request_size -= self._max_request_size
- start += self._max_request_size
- if start < end:
- futures.append(self._get_segment(start, end-start))
- return [fut.get_result() for fut in futures]
-
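
For example (a sketch of the slicing arithmetic only), a 70MB request with the default 30MB MAX_REQUEST_SIZE becomes three parallel range fetches:

  def example_segments(start, request_size, max_request=30 * 1024 * 1024):
      # Mirrors the loop in _get_segments() above.
      segments = []
      end = start + request_size
      while request_size > max_request:
          segments.append((start, max_request))
          request_size -= max_request
          start += max_request
      if start < end:
          segments.append((start, end - start))
      return segments

  mb = 1024 * 1024
  print example_segments(0, 70 * mb)
  # [(0, 31457280), (31457280, 31457280), (62914560, 10485760)]
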
- @ndb.tasklet
- def _get_segment(self, start, request_size, check_response=True):
- """Get a segment of the file from Google Storage.
-
- Args:
- start: start offset of the segment. Inclusive. Has to be within the
- range of the file.
- request_size: number of bytes to request. Has to be small enough
- for a single urlfetch request. May go over the logical range of the
- file.
- check_response: True to check the validity of GCS response automatically
- before the future returns. False otherwise. See Yields section.
-
- Yields:
- If check_response is True, the segment [start, start + request_size)
- of the file.
- Otherwise, a tuple. The first element is the unverified file segment.
- The second element is a closure that checks response. Caller should
- first invoke the closure before consuming the file segment.
-
- Raises:
- ValueError: if the file has changed while reading.
- """
- end = start + request_size - 1
- content_range = '%d-%d' % (start, end)
- headers = {'Range': 'bytes=' + content_range}
- status, resp_headers, content = yield self._api.get_object_async(
- self._path, headers=headers)
- def _checker():
- errors.check_status(status, [200, 206], self._path, headers,
- resp_headers, body=content)
- self._check_etag(resp_headers.get('etag'))
- if check_response:
- _checker()
- raise ndb.Return(content)
- raise ndb.Return(content, _checker)
-
- def _check_etag(self, etag):
- """Check if etag is the same across requests to GCS.
-
- If self._etag is None, set it. If etag is set, check that the new
- etag equals the old one.
-
- In the __init__ method, we fire one HEAD and one GET request using
- ndb tasklets. Whichever returns first sets the initial value.
-
- Args:
- etag: etag from a GCS HTTP response. None if etag is not part of the
- response header. It could be None for example in the case of GCS
- composite file.
-
- Raises:
- ValueError: if two etags are not equal.
- """
- if etag is None:
- return
- elif self._etag is None:
- self._etag = etag
- elif self._etag != etag:
- raise ValueError('File on GCS has changed while reading.')
-
- def close(self):
- self.closed = True
- self._buffer = None
- self._buffer_future = None
-
- def __enter__(self):
- return self
-
- def __exit__(self, atype, value, traceback):
- self.close()
- return False
-
- def seek(self, offset, whence=os.SEEK_SET):
- """Set the file's current offset.
-
- Note if the new offset is out of bounds, it is adjusted to either 0 or EOF.
-
- Args:
- offset: seek offset as number.
- whence: seek mode. Supported modes are os.SEEK_SET (absolute seek),
- os.SEEK_CUR (seek relative to the current position), and os.SEEK_END
- (seek relative to the end, offset should be negative).
-
- Raises:
- IOError: When this buffer is closed.
- ValueError: When whence is invalid.
- """
- self._check_open()
-
- self._buffer.reset()
- self._buffer_future = None
-
- if whence == os.SEEK_SET:
- self._offset = offset
- elif whence == os.SEEK_CUR:
- self._offset += offset
- elif whence == os.SEEK_END:
- self._offset = self._file_size + offset
- else:
- raise ValueError('Whence mode %s is invalid.' % str(whence))
-
- self._offset = min(self._offset, self._file_size)
- self._offset = max(self._offset, 0)
- if self._remaining():
- self._request_next_buffer()
-
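# --- Hedged aside (illustrative values, not part of the deleted file): with
# a 100-byte file, the whence modes and clamping above behave like so:
#   seek(10)                -> offset 10
#   seek(5, os.SEEK_CUR)    -> offset 15
#   seek(-10, os.SEEK_END)  -> offset 90
#   seek(500)               -> clamped to offset 100 (EOF)
# ---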
- def tell(self):
- """Tell the file's current offset.
-
- Returns:
- current offset in reading this file.
-
- Raises:
- IOError: When this buffer is closed.
- """
- self._check_open()
- return self._offset
-
- def _check_open(self):
- if self.closed:
- raise IOError('Buffer is closed.')
-
- def seekable(self):
- return True
-
- def readable(self):
- return True
-
- def writable(self):
- return False
-
-
-class _Buffer(object):
- """In memory buffer."""
-
- def __init__(self):
- self.reset()
-
- def reset(self, content='', offset=0):
- self._buffer = content
- self._offset = offset
-
- def read(self, size=-1):
- """Returns bytes from self._buffer and update related offsets.
-
- Args:
- size: number of bytes to read starting from current offset.
- Read the entire buffer if negative.
-
- Returns:
- Requested bytes from buffer.
- """
- if size < 0:
- offset = len(self._buffer)
- else:
- offset = self._offset + size
- return self.read_to_offset(offset)
-
- def read_to_offset(self, offset):
- """Returns bytes from self._buffer and update related offsets.
-
- Args:
- offset: read from current offset to this offset, exclusive.
-
- Returns:
- Requested bytes from buffer.
- """
- assert offset >= self._offset
- result = self._buffer[self._offset: offset]
- self._offset += len(result)
- return result
-
- def remaining(self):
- return len(self._buffer) - self._offset
-
- def find_newline(self, size=-1):
- """Search for newline char in buffer starting from current offset.
-
- Args:
- size: number of bytes to search. -1 means all.
-
- Returns:
- offset of newline char in buffer. -1 if it doesn't exist.
- """
- if size < 0:
- return self._buffer.find('\n', self._offset)
- return self._buffer.find('\n', self._offset, self._offset + size)
-
-
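# --- Hedged aside: a short walk-through of the _Buffer contract above
# (illustrative values; offsets advance only as bytes are consumed): ---
#
#   buf = _Buffer()
#   buf.reset('hello\nworld')
#   buf.read(5)            # -> 'hello'; internal offset is now 5
#   buf.find_newline()     # -> 5; '\n' sits at absolute offset 5
#   buf.read_to_offset(6)  # -> '\n'; consume through the newline
#   buf.remaining()        # -> 5; 'world' is still buffered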
-class StreamingBuffer(object):
- """A class for creating large objects using the 'resumable' API.
-
- The API is a subset of the Python writable stream API sufficient to
- support writing zip files using the zipfile module.
-
- The exact sequence of calls and use of headers is documented at
- https://developers.google.com/storage/docs/developer-guide#unknownresumables
- """
-
- _blocksize = 256 * 1024
-
- _flushsize = 8 * _blocksize
-
- _maxrequestsize = 9 * 4 * _blocksize
-
- def __init__(self,
- api,
- path,
- content_type=None,
- gcs_headers=None):
- """Constructor.
-
- Args:
- api: A StorageApi instance.
- path: Quoted/escaped path to the object, e.g. /mybucket/myfile
- content_type: Optional content-type; by default, content-type
- detection is delegated to Google Cloud Storage.
- gcs_headers: additional gs headers as a str->str dict, e.g.
- {'x-goog-acl': 'private', 'x-goog-meta-foo': 'foo'}.
- Raises:
- IOError: When this location cannot be found.
- """
- assert self._maxrequestsize > self._blocksize
- assert self._maxrequestsize % self._blocksize == 0
- assert self._maxrequestsize >= self._flushsize
-
- self._api = api
- self._path = path
-
- self.name = api_utils._unquote_filename(path)
- self.closed = False
-
- self._buffer = collections.deque()
- self._buffered = 0
- self._written = 0
- self._offset = 0
-
- headers = {'x-goog-resumable': 'start'}
- if content_type:
- headers['content-type'] = content_type
- if gcs_headers:
- headers.update(gcs_headers)
- status, resp_headers, content = self._api.post_object(path, headers=headers)
- errors.check_status(status, [201], path, headers, resp_headers,
- body=content)
- loc = resp_headers.get('location')
- if not loc:
- raise IOError('No location header found in 201 response')
- parsed = urlparse.urlparse(loc)
- self._path_with_token = '%s?%s' % (self._path, parsed.query)
-
- def __getstate__(self):
- """Store state as part of serialization/pickling.
-
- The contents of the write buffer are stored. Writes to the underlying
- storage are required to be on block boundaries (_blocksize) except for the
- last write. In the worst case the pickled version of this object may be
- slightly larger than the blocksize.
-
- Returns:
- A dictionary with the state of this object
-
- """
- return {'api': self._api,
- 'path': self._path,
- 'path_token': self._path_with_token,
- 'buffer': self._buffer,
- 'buffered': self._buffered,
- 'written': self._written,
- 'offset': self._offset,
- 'closed': self.closed}
-
- def __setstate__(self, state):
- """Restore state as part of deserialization/unpickling.
-
- Args:
- state: the dictionary from a __getstate__ call
- """
- self._api = state['api']
- self._path_with_token = state['path_token']
- self._buffer = state['buffer']
- self._buffered = state['buffered']
- self._written = state['written']
- self._offset = state['offset']
- self.closed = state['closed']
- self._path = state['path']
- self.name = api_utils._unquote_filename(self._path)
-
- def write(self, data):
- """Write some bytes.
-
- Args:
- data: data to write. str.
-
- Raises:
- TypeError: if data is not of type str.
- """
- self._check_open()
- if not isinstance(data, str):
- raise TypeError('Expected str but got %s.' % type(data))
- if not data:
- return
- self._buffer.append(data)
- self._buffered += len(data)
- self._offset += len(data)
- if self._buffered >= self._flushsize:
- self._flush()
-
- def flush(self):
- """Flush as much as possible to GCS.
-
- GCS *requires* that all writes except the final one align on
- 256KB boundaries, so the internal buffer may still hold fewer than
- 256KB after a flush.
- """
- self._check_open()
- self._flush(finish=False)
-
- def tell(self):
- """Return the total number of bytes passed to write() so far.
-
- (There is no seek() method.)
- """
- return self._offset
-
- def close(self):
- """Flush the buffer and finalize the file.
-
- When this returns, the new file is available for reading.
- """
- if not self.closed:
- self.closed = True
- self._flush(finish=True)
- self._buffer = None
-
- def __enter__(self):
- return self
-
- def __exit__(self, atype, value, traceback):
- self.close()
- return False
-
- def _flush(self, finish=False):
- """Internal API to flush.
-
- The buffer is flushed to GCS only when the total amount of buffered data
- is at least self._blocksize, or when finish=True to flush the final
- (possibly incomplete) block of the file.
- """
- while ((finish and self._buffered >= 0) or
- (not finish and self._buffered >= self._blocksize)):
- tmp_buffer = []
- tmp_buffer_len = 0
-
- excess = 0
- while self._buffer:
- buf = self._buffer.popleft()
- size = len(buf)
- self._buffered -= size
- tmp_buffer.append(buf)
- tmp_buffer_len += size
- if tmp_buffer_len >= self._maxrequestsize:
- excess = tmp_buffer_len - self._maxrequestsize
- break
- if not finish and (
- tmp_buffer_len % self._blocksize + self._buffered <
- self._blocksize):
- excess = tmp_buffer_len % self._blocksize
- break
-
- if excess:
- over = tmp_buffer.pop()
- size = len(over)
- assert size >= excess
- tmp_buffer_len -= size
- head, tail = over[:-excess], over[-excess:]
- self._buffer.appendleft(tail)
- self._buffered += len(tail)
- if head:
- tmp_buffer.append(head)
- tmp_buffer_len += len(head)
-
- data = ''.join(tmp_buffer)
- file_len = '*'
- if finish and not self._buffered:
- file_len = self._written + len(data)
- self._send_data(data, self._written, file_len)
- self._written += len(data)
- if file_len != '*':
- break
-
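# --- Hedged aside: the alignment rule _flush enforces above, reduced to a
# self-contained helper (hypothetical name; ignores the per-request size cap).
# Every intermediate upload must be a multiple of the block size; only the
# final request may carry a partial block. ---
def _sendable_bytes(buffered, blocksize, finish):
    """Return how many buffered bytes may be uploaded right now."""
    if finish:
        return buffered  # final request: everything, aligned or not
    return (buffered // blocksize) * blocksize  # largest block-aligned prefix

# Example: _sendable_bytes(700 * 1024, 256 * 1024, False) == 512 * 1024;
# the trailing 188KB stays buffered until more data arrives or close().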
- def _send_data(self, data, start_offset, file_len):
- """Send the block to the storage service.
-
- This is a utility method that does not modify self.
-
- Args:
- data: data to send in str.
- start_offset: start offset of the data in relation to the file.
- file_len: an int if this is the last data to append to the file.
- Otherwise '*'.
- """
- headers = {}
- end_offset = start_offset + len(data) - 1
-
- if data:
- headers['content-range'] = ('bytes %d-%d/%s' %
- (start_offset, end_offset, file_len))
- else:
- headers['content-range'] = ('bytes */%s' % file_len)
-
- status, response_headers, content = self._api.put_object(
- self._path_with_token, payload=data, headers=headers)
- if file_len == '*':
- expected = 308
- else:
- expected = 200
- errors.check_status(status, [expected], self._path, headers,
- response_headers, content,
- {'upload_path': self._path_with_token})
-
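# --- Hedged aside: the Content-Range values the method above produces, with
# illustrative numbers (a 256KB first block, a 300001-byte file): ---
#
#   'bytes 0-262143/*'            intermediate block; total size still unknown
#   'bytes 262144-300000/300001'  final request; declares the file length
#   'bytes */300001'              zero-byte request that only finalizes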
- def _get_offset_from_gcs(self):
- """Get the last offset that has been written to GCS.
-
- This is a utility method that does not modify self.
-
- Returns:
- an int of the last offset written to GCS by this upload, inclusive.
- -1 means nothing has been written.
- """
- headers = {'content-range': 'bytes */*'}
- status, response_headers, content = self._api.put_object(
- self._path_with_token, headers=headers)
- errors.check_status(status, [308], self._path, headers,
- response_headers, content,
- {'upload_path': self._path_with_token})
- val = response_headers.get('range')
- if val is None:
- return -1
- _, offset = val.rsplit('-', 1)
- return int(offset)
-
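# --- Hedged aside: the Range response header parsed above looks like
# 'bytes=0-262143' (illustrative value); rsplit('-', 1) takes the final
# number, so the method would return 262143, i.e. 256KB already stored. ---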
- def _force_close(self, file_length=None):
- """Close this buffer on file_length.
-
- Finalize this upload immediately on file_length.
- Contents that are still in memory will not be uploaded.
-
- This is a utility method that does not modify self.
-
- Args:
- file_length: file length. Must match what has been uploaded. If None,
- it will be queried from GCS.
- """
- if file_length is None:
- file_length = self._get_offset_from_gcs() + 1
- self._send_data('', 0, file_length)
-
- def _check_open(self):
- if self.closed:
- raise IOError('Buffer is closed.')
-
- def seekable(self):
- return False
-
- def readable(self):
- return False
-
- def writable(self):
- return True
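
The class docstring above notes that this writer exists to support the zipfile
module. A minimal sketch of that use, assuming an already-constructed
StorageApi instance named `api` (the bucket path and setup are hypothetical,
not taken from the deleted code):

    import zipfile

    buf = StreamingBuffer(api, '/mybucket/archive.zip',
                          content_type='application/zip')
    with buf:
        with zipfile.ZipFile(buf, 'w', zipfile.ZIP_DEFLATED) as archive:
            archive.writestr('hello.txt', 'hello, GCS')
    # Closing the ZipFile writes the central directory through buf; closing
    # buf then flushes the final partial block and finalizes the upload.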
diff --git a/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/test_utils.py b/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/test_utils.py
deleted file mode 100755
index e4d82477..00000000
--- a/src/tools/google_trace_viewer/perf_insights/third_party/cloudstorage/test_utils.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# Copyright 2013 Google Inc. All Rights Reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
-# either express or implied. See the License for the specific
-# language governing permissions and limitations under the License.
-
-"""Utils for testing."""
-
-
-class MockUrlFetchResult(object):
-
- def __init__(self, status, headers, body):
- self.status_code = status
- self.headers = headers
- self.content = body
- self.content_was_truncated = False
- self.final_url = None
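
A minimal sketch of how a result mock like this is typically used (the fake
fetch function and its injection point are hypothetical, not part of the
deleted file):

    def fake_fetch(url, **kwargs):
        # Pretend GCS answered a Range request with 206 Partial Content.
        return MockUrlFetchResult(206, {'etag': '"abc123"'}, 'some bytes')

    result = fake_fetch('https://storage.googleapis.com/bucket/obj')
    assert result.status_code == 206
    assert result.content == 'some bytes'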
diff --git a/src/tools/google_trace_viewer/tracing/.allow-devtools-save b/src/tools/google_trace_viewer/tracing/.allow-devtools-save
deleted file mode 100755
index e69de29b..00000000
diff --git a/src/tools/google_trace_viewer/tracing/.bowerrc b/src/tools/google_trace_viewer/tracing/.bowerrc
deleted file mode 100755
index d981a1dd..00000000
--- a/src/tools/google_trace_viewer/tracing/.bowerrc
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "directory": "third_party/components"
-}
diff --git a/src/tools/google_trace_viewer/tracing/.gitignore b/src/tools/google_trace_viewer/tracing/.gitignore
deleted file mode 100755
index c17a21f6..00000000
--- a/src/tools/google_trace_viewer/tracing/.gitignore
+++ /dev/null
@@ -1,9 +0,0 @@
-*.pyc
-*.swp
-.idea
-.DS_Store
-*~
-.project
-.settings
-bin/trace_viewer*.html
-test_data/measurmt-traces
diff --git a/src/tools/google_trace_viewer/tracing/AUTHORS b/src/tools/google_trace_viewer/tracing/AUTHORS
deleted file mode 100755
index 5a7ed829..00000000
--- a/src/tools/google_trace_viewer/tracing/AUTHORS
+++ /dev/null
@@ -1,12 +0,0 @@
-# Names should be added to this file with this pattern:
-#
-# For individuals:
-# Name
-#
-# For organizations:
-# Organization
-#
-# See python fnmatch module documentation for more information.
-
-The Chromium Authors <*@chromium.org>
-Google Inc. <*@google.com>
diff --git a/src/tools/google_trace_viewer/tracing/BUILD.gn b/src/tools/google_trace_viewer/tracing/BUILD.gn
deleted file mode 100755
index c2ce3eb4..00000000
--- a/src/tools/google_trace_viewer/tracing/BUILD.gn
+++ /dev/null
@@ -1,27 +0,0 @@
-# Copyright 2014 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-gypi_values = exec_script("//build/gypi_to_gn.py",
- [ rebase_path("trace_viewer.gypi") ],
- "scope",
- [ "trace_viewer.gypi" ])
-
-# TODO: ideally this would go into the target_gen_dir, but this requires some
-# changes to the scripts that process them.
-output_resource_dir = "$root_gen_dir/content/browser/tracing"
-
-action("generate_about_tracing") {
- script = "build/generate_about_tracing_contents"
-
- inputs = gypi_values.tracing_css_files + gypi_values.tracing_js_html_files +
- gypi_values.tracing_img_files
- outputs = [
- "$output_resource_dir/about_tracing.js",
- "$output_resource_dir/about_tracing.html",
- ]
-
- args = [
- "--outdir", rebase_path(output_resource_dir, root_build_dir),
- ]
-}
diff --git a/src/tools/google_trace_viewer/tracing/LICENSE b/src/tools/google_trace_viewer/tracing/LICENSE
deleted file mode 100755
index e6c0d72a..00000000
--- a/src/tools/google_trace_viewer/tracing/LICENSE
+++ /dev/null
@@ -1,27 +0,0 @@
-// Copyright (c) 2012 The Chromium Authors. All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are
-// met:
-//
-// * Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// * Redistributions in binary form must reproduce the above
-// copyright notice, this list of conditions and the following disclaimer
-// in the documentation and/or other materials provided with the
-// distribution.
-// * Neither the name of Google Inc. nor the names of its
-// contributors may be used to endorse or promote products derived from
-// this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/src/tools/google_trace_viewer/tracing/OWNERS b/src/tools/google_trace_viewer/tracing/OWNERS
deleted file mode 100755
index 72e8ffc0..00000000
--- a/src/tools/google_trace_viewer/tracing/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-*
diff --git a/src/tools/google_trace_viewer/tracing/PRESUBMIT.py b/src/tools/google_trace_viewer/tracing/PRESUBMIT.py
deleted file mode 100755
index 1c1bf199..00000000
--- a/src/tools/google_trace_viewer/tracing/PRESUBMIT.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-import sys
-
-def RunChecks(depot_tools_input_api, depot_tools_output_api):
- from build import tv_input_api
- input_api = tv_input_api.TvInputAPI(depot_tools_input_api)
-
- results = []
- from tracing.build import check_gypi
- err = check_gypi.GypiCheck()
- if err:
- results += [err]
-
- from tracing.build import check_modules
- err = check_modules.CheckModules()
- if err:
- results += [err]
-
- from tracing.build import js_checks
- results += js_checks.RunChecks(input_api)
-
- return map(depot_tools_output_api.PresubmitError, results)
-
-def CheckChange(input_api, output_api):
- original_sys_path = sys.path
- try:
- sys.path += [input_api.PresubmitLocalPath()]
- return RunChecks(input_api, output_api)
- finally:
- sys.path = original_sys_path
-
-def CheckChangeOnUpload(input_api, output_api):
- return CheckChange(input_api, output_api)
-
-def CheckChangeOnCommit(input_api, output_api):
- return CheckChange(input_api, output_api)
diff --git a/src/tools/google_trace_viewer/tracing/__init__.py b/src/tools/google_trace_viewer/tracing/__init__.py
deleted file mode 100755
index b471136b..00000000
--- a/src/tools/google_trace_viewer/tracing/__init__.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright (c) 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-import os
-import sys
-
-def _SetupTVCMPath():
- tvcm_path = os.path.abspath(os.path.join(os.path.dirname(__file__),
- 'third_party', 'tvcm'))
- if tvcm_path not in sys.path:
- sys.path.append(tvcm_path)
-
-_SetupTVCMPath()
diff --git a/src/tools/google_trace_viewer/tracing/app.yaml b/src/tools/google_trace_viewer/tracing/app.yaml
deleted file mode 100755
index e2a265a3..00000000
--- a/src/tools/google_trace_viewer/tracing/app.yaml
+++ /dev/null
@@ -1,53 +0,0 @@
-# Copyright 2015 The Chromium Authors. All rights reserved.
-# Use of this source code is governed by a BSD-style license that can be
-# found in the LICENSE file.
-
-runtime: python27
-threadsafe: true
-api_version: 1
-
-handlers:
-
-- url: /base
- static_dir: tracing/tracing/base
- secure: always
-
-- url: /core
- static_dir: tracing/tracing/core
- secure: always
-
-- url: /extras
- static_dir: tracing/tracing/extras
- secure: always
-
-- url: /ui
- static_dir: tracing/tracing/ui
- secure: always
-
-- url: /components
- static_dir: tracing/third_party/components
- secure: always
-
-- url: /trace_viewer
- static_dir: tracing/tracing
- secure: always
-
-- url: /gl-matrix-min.js
- static_files: tracing/third_party/gl-matrix/dist/gl-matrix-min.js
- upload: tracing/third_party/gl-matrix/dist/gl-matrix-min.js
- secure: always
-
-- url: /jszip.min.js
- static_files: tracing/third_party/jszip/jszip.min.js
- upload: tracing/third_party/jszip/jszip.min.js
- secure: always
-
-- url: /d3.min.js
- static_files: tracing/third_party/d3/d3.min.js
- upload: tracing/third_party/d3/d3.min.js
- secure: always
-
-- url: /.*
- static_files: tracing/tracing/ui/extras/drive/index.html
- upload: tracing/tracing/ui/extras/drive/index.html
- secure: always
diff --git a/src/tools/google_trace_viewer/tracing/bin/index.html b/src/tools/google_trace_viewer/tracing/bin/index.html
deleted file mode 100755
index eea5c2ee..00000000
--- a/src/tools/google_trace_viewer/tracing/bin/index.html
+++ /dev/null
@@ -1,48 +0,0 @@