diff --git a/README.md b/README.md
index 5aa49ee5..f9d1c8bb 100644
--- a/README.md
+++ b/README.md
@@ -238,6 +238,21 @@ CSV files that correspond to each of the tables inside of the database. You
 can learn more about the `integrate` sub-command by typing `chasten integrate
 --help`.
 
+## šŸ’  Verbose Output
+
+When running a `chasten` command, adding the `--verbose` flag can significantly enhance your troubleshooting experience and provide a detailed understanding of the tool's functionality. Here is an example with `chasten analyze lazytracker`, where the bracketed values are placeholders for your own paths:
+
+```shell
+chasten analyze lazytracker \
+    --config <path/to/config> \
+    --search-path <path/to/source> \
+    --save-directory <path/to/output> \
+    --save \
+    --verbose
+```
+
+Upon executing this command, you can expect the output to contain informative messages such as `āœØ Matching source code:`, indicating that the tool is actively comparing the source code against the specified patterns. Additionally, you will receive detailed match results, providing insights into the identified checks.
+
 ## šŸŒ„ Results
 
 If you want to create an interactive analysis dashboard that uses šŸ“¦
diff --git a/chasten-test b/chasten-test
new file mode 160000
index 00000000..2d8478da
--- /dev/null
+++ b/chasten-test
@@ -0,0 +1 @@
+Subproject commit 2d8478da0234a1425f5e767e0d1a69d17ec26aa4
diff --git a/chasten/constants.py b/chasten/constants.py
index 51963182..600f42d0 100644
--- a/chasten/constants.py
+++ b/chasten/constants.py
@@ -1,6 +1,7 @@
 """Define constants with dataclasses for use in chasten."""
 
 from dataclasses import dataclass
+from pathlib import Path
 
 
 # chasten constant
@@ -8,6 +9,7 @@
 class Chasten:
     """Define the Chasten dataclass for constant(s)."""
 
+    Analyze_Storage: Path
     Application_Name: str
     Application_Author: str
     Chasten_Database_View: str
@@ -26,6 +28,7 @@ class Chasten:
 
 
 chasten = Chasten(
+    Analyze_Storage=Path("analysis.md"),
     Application_Name="chasten",
     Application_Author="ChastenedTeam",
     Chasten_Database_View="chasten_complete",
diff --git a/chasten/database.py b/chasten/database.py
index bc0a144f..299c76e7 100644
--- a/chasten/database.py
+++ b/chasten/database.py
@@ -6,7 +6,7 @@
 
 from sqlite_utils import Database
 
-from chasten import constants, enumerations, filesystem, output
+from chasten import constants, enumerations, filesystem, output, util
 
 CHASTEN_SQL_SELECT_QUERY = """
 SELECT
@@ -66,7 +66,6 @@ def enable_full_text_search(chasten_database_name: str) -> None:
     database["sources"].enable_fts(
         [
             "filename",
-            "filelines",
             "check_id",
             "check_name",
             "check_description",
@@ -129,18 +128,6 @@ def display_datasette_details(
     output.console.print()
 
 
-def executable_name(OpSystem: str = "Linux") -> str:
-    """Get the executable directory depending on OS"""
-    exe_directory = "/bin/"
-    executable_name = constants.datasette.Datasette_Executable
-    # Checks if the OS is windows and changed where to search if true
-    if OpSystem == "Windows":
-        exe_directory = "/Scripts/"
-        executable_name += ".exe"
-    virtual_env_location = sys.prefix
-    return virtual_env_location + exe_directory + executable_name
-
-
 def start_datasette_server(  # noqa: PLR0912, PLR0913
     database_path: Path,
     datasette_metadata: Path,
@@ -160,7 +147,9 @@
     # chasten will exist in a bin directory. For instance, the "datasette"
     # executable that is a dependency of chasten can be found by starting
     # the search from this location for the virtual environment.
- full_executable_name = executable_name(OpSystem) + full_executable_name = util.executable_name( + constants.datasette.Datasette_Executable, OpSystem + ) (found_executable, executable_path) = filesystem.can_find_executable( full_executable_name ) @@ -224,7 +213,7 @@ def start_datasette_server( # noqa: PLR0912, PLR0913 # datasette-publish-fly plugin) and thus need to exit and not proceed if not found_publish_platform_executable: output.console.print( - ":person_shrugging: Was not able to find '{datasette_platform}'" + f":person_shrugging: Was not able to find '{datasette_platform}'" ) return None # was able to find the fly or vercel executable that will support the @@ -276,3 +265,25 @@ def start_datasette_server( # noqa: PLR0912, PLR0913 # there is debugging output in the console to indicate this option. proc = subprocess.Popen(cmd) proc.wait() + + +def display_results_frog_mouth(result_file, OpSystem) -> None: + """Run frogmouth as a subprocess of chasten""" + cmd = [ + "frogmouth", + result_file, + ] + executable = util.executable_name("frogmouth", OpSystem) + exec_found, executable_path = filesystem.can_find_executable(executable) + if exec_found: + # run frogmouth with specified path + output.console.print("\nšŸø Frogmouth Information\n") + output.console.print(f" {small_bullet_unicode} Venv: {sys.prefix}") + output.console.print(f" {small_bullet_unicode} Program: {executable_path}") + proc = subprocess.Popen(cmd) + proc.wait() + else: + output.console.print( + ":person_shrugging: Was not able to find frogmouth executable try installing it separately" + ) + return None diff --git a/chasten/filesystem.py b/chasten/filesystem.py index 3755ecc7..1af06532 100644 --- a/chasten/filesystem.py +++ b/chasten/filesystem.py @@ -229,7 +229,7 @@ def write_dict_results( # using indentation to ensure that JSON file is readable results_path_with_file = results_path / complete_results_file_name # use the built-in method from pathlib Path to write the JSON contents - results_path_with_file.write_text(results_json) + results_path_with_file.write_text(results_json, "utf-8") # return the name of the file that contains the JSON dictionary contents return complete_results_file_name @@ -293,7 +293,7 @@ def get_json_results(json_paths: List[Path]) -> List[Dict[Any, Any]]: # iterate through each of the provided paths to a JSON file for json_path in json_paths: # turn the contents of the current JSON file into a dictionary - json_dict = json.loads(json_path.read_text()) + json_dict = json.loads(json_path.read_text("utf-8")) # add the current dictionary to the list of dictionaries json_dicts_list.append(json_dict) # return the list of JSON dictionaries diff --git a/chasten/main.py b/chasten/main.py index f5310ab3..61265b3d 100644 --- a/chasten/main.py +++ b/chasten/main.py @@ -1,7 +1,8 @@ """šŸ’« Chasten checks the AST of a Python program.""" -import os.path +import os import sys +import time from pathlib import Path from typing import Any, Dict, List, Tuple, Union @@ -33,6 +34,7 @@ # create a small bullet for display in the output small_bullet_unicode = constants.markers.Small_Bullet_Unicode +ANALYSIS_FILE = constants.chasten.Analyze_Storage # --- @@ -126,7 +128,8 @@ def validate_file( else: output.opt_print_log(verbose, newline="") output.opt_print_log( - verbose, label=f":sparkles: Contents of {configuration_file_str}:\n" + verbose, + label=f":sparkles: Contents of {configuration_file_str}:\n", ) output.opt_print_log(verbose, config_file=configuration_file_yml) return validated @@ -136,7 +139,8 @@ def 
validate_configuration_files(
     config: Path,
     verbose: bool = False,
 ) -> Tuple[
-    bool, Union[Dict[str, List[Dict[str, Union[str, Dict[str, int]]]]], Dict[Any, Any]]
+    bool,
+    Union[Dict[str, List[Dict[str, Union[str, Dict[str, int]]]]], Dict[Any, Any]],
 ]:
     """Validate the configuration."""
     # there is a specified configuration directory path;
@@ -359,7 +363,8 @@ def configure(  # noqa: PLR0913
     )
     # write the configuration file for the chasten tool in the created directory
     filesystem.create_configuration_file(
-        created_directory_path, constants.filesystem.Main_Configuration_File
+        created_directory_path,
+        constants.filesystem.Main_Configuration_File,
     )
     # write the check file for the chasten tool in the created directory
     filesystem.create_configuration_file(
@@ -386,6 +391,12 @@
 @cli.command()
 def analyze(  # noqa: PLR0912, PLR0913, PLR0915
     project: str = typer.Argument(help="Name of the project."),
+    xpath: str = typer.Option(
+        "2.0",
+        "--xpath-version",
+        "-xp",
+        help="XPath version used for the check queries; version 2.0 is used by default.",
+    ),
     check_include: Tuple[enumerations.FilterableAttribute, str, int] = typer.Option(
         (None, None, 0),
         "--check-include",
@@ -432,6 +443,18 @@
         "--save-xml",
         "-sx",
         help="The directory/file for the XML file(s) to be saved in.",
+    ),
+    store_result: Path = typer.Option(
+        None,
+        "--markdown-storage",
+        "-r",
+        help="A directory for storing results in a markdown file",
+        exists=True,
+        file_okay=False,
+        dir_okay=True,
+        readable=True,
+        writable=True,
+        resolve_path=True,
     ),
     config: Path = typer.Option(
         None,
@@ -451,10 +473,13 @@
         "-t",
         help="Specify the destination for debugging output.",
     ),
+    display: bool = typer.Option(False, help="Display results using frogmouth"),
     verbose: bool = typer.Option(False, help="Enable verbose mode output."),
     save: bool = typer.Option(False, help="Enable saving of output file(s)."),
+    force: bool = typer.Option(False, help="Force creation of new markdown file"),
 ) -> None:
     """šŸ’« Analyze the AST of Python source code."""
+    start_time = time.time()
     # output the preamble, including extra parameters specific to this function
     output_preamble(
         verbose,
@@ -465,6 +490,7 @@
     )
     # extract the current version of the program
     chasten_version = util.get_chasten_version()
+    output.logger.debug(f"Current version of chasten: {chasten_version}")
     # create the include and exclude criteria
     include = results.CheckCriterion(
         attribute=str(checks.fix_check_criterion(check_include[0])),
@@ -528,6 +554,27 @@
             "\n:person_shrugging: Cannot perform analysis due to invalid search directory.\n"
         )
         sys.exit(constants.markers.Non_Zero_Exit)
+    if store_result:
+        # creates an empty string for storing results temporarily
+        analysis_result = ""
+        analysis_file_dir = store_result / ANALYSIS_FILE
+        # clears the markdown file of results if it exists and new results are to be stored
+        if filesystem.confirm_valid_file(analysis_file_dir):
+            if not force:
+                if display:
+                    database.display_results_frog_mouth(
+                        analysis_file_dir, util.get_OS()
+                    )
+                    sys.exit(0)
+                else:
+                    output.console.print(
+                        "File already exists: use --force to recreate markdown directory."
+                    )
+                    sys.exit(constants.markers.Non_Zero_Exit)
+            else:
+                analysis_file_dir.write_text("")
+        # creates the file if it does not already exist
+        analysis_file_dir.touch()
     # create the list of directories
     valid_directories = [input_path]
     # output the list of directories subject to checking
@@ -542,7 +589,9 @@
     # iterate through and perform each of the checks
     for current_check in check_list:
         # extract the pattern for the current check
-        current_xpath_pattern = str(current_check[constants.checks.Check_Pattern])  # type: ignore
+        current_xpath_pattern = str(
+            current_check[constants.checks.Check_Pattern]
+        )  # type: ignore
         # extract the minimum and maximum values for the checks, if they exist
         # note that this function will return None for a min or a max if
         # that attribute does not exist inside of the current_check; importantly,
@@ -551,15 +600,24 @@
         # extract details about the check to display in the header
         # of the syntax box for this specific check
         check_id = current_check[constants.checks.Check_Id]  # type: ignore
+        output.logger.debug(f"check id: {check_id}")
         check_name = current_check[constants.checks.Check_Name]  # type: ignore
         check_description = checks.extract_description(current_check)
         # search for the XML contents of an AST that match the provided
         # XPATH query using the search_python_file in search module of pyastgrep;
         # this looks for matches across all path(s) in the specified source path
-        match_generator = pyastgrepsearch.search_python_files(
-            paths=valid_directories, expression=current_xpath_pattern, xpath2=True
-        )
-        # materialize a list from the generator of (potential) matches;
+        # match_generator = pyastgrepsearch.search_python_files(
+        #     paths=valid_directories, expression=current_xpath_pattern, xpath2=True
+        # )
+        if xpath == "1.0":
+            match_generator = pyastgrepsearch.search_python_files(
+                paths=valid_directories, expression=current_xpath_pattern, xpath2=False
+            )
+        else:
+            match_generator = pyastgrepsearch.search_python_files(
+                paths=valid_directories, expression=current_xpath_pattern, xpath2=True
+            )
+        # materialize a list from the generator of (potential) matches;
         # note that this list will also contain an object that will
         # indicate that the analysis completed for each located file
         match_generator_list = list(match_generator)
@@ -594,6 +652,19 @@
             f"  {check_status_symbol} id: '{check_id}', name: '{check_name}'"
             + f", pattern: '{current_xpath_pattern_escape}', min={min_count}, max={max_count}"
         )
+        if store_result:
+            # makes the check marks or x's appear as words instead for markdown
+            check_pass = (
+                "PASSED:"
+                if check_status_symbol == "[green]\u2713[/green]"
+                else "FAILED:"
+            )
+            # stores the check outcome in a string to be stored in the file later
+            analysis_result += (
+                f"\n# {check_pass} **ID:** '{check_id}', **Name:** '{check_name}'"
+                + f", **Pattern:** '{current_xpath_pattern_escape}', min={min_count}, max={max_count}\n\n"
+            )
+
         # for each potential match, log and, if verbose model is enabled,
         # display details about each of the matches
         current_result_source = results.Source(
@@ -630,6 +701,9 @@
             output.console.print(
                 f"  {small_bullet_unicode} {file_name} - {len(matches_list)} matches"
             )
+            if store_result:
+                # stores details of checks in string to be stored later
+                analysis_result += f"  - {file_name} - {len(matches_list)} matches\n"
             # extract the lines of source code for this file; note
that all of # these matches are organized for the same file and thus it is # acceptable to extract the lines of the file from the first match @@ -659,7 +733,10 @@ def analyze( # noqa: PLR0912, PLR0913, PLR0915 ), linematch_context=util.join_and_preserve( current_match.file_lines, - max(0, position_end - constants.markers.Code_Context), + max( + 0, + position_end - constants.markers.Code_Context, + ), position_end + constants.markers.Code_Context, ), ) @@ -667,9 +744,19 @@ def analyze( # noqa: PLR0912, PLR0913, PLR0915 # pyastgrepsearch.Match for verbose debugging output as needed current_check_save._matches.append(current_match) # add the match to the listing of matches for the current check - current_check_save.matches.append(current_match_for_current_check_save) # type: ignore + current_check_save.matches.append( + current_match_for_current_check_save + ) # type: ignore # add the current source to main object that contains a list of source chasten_results_save.sources.append(current_result_source) + # add the amount of total matches in each check to the end of each checks output + output.console.print(f" = {len(match_generator_list)} total matches\n") + # calculate the final count of matches found + total_result = util.total_amount_passed(chasten_results_save, len(check_list)) + # display checks passed, total amount of checks, and percentage of checks passed + output.console.print( + f":computer: {total_result[0]} / {total_result[1]} checks passed ({total_result[2]}%)\n" + ) # display all of the analysis results if verbose output is requested output.print_analysis_details(chasten_results_save, verbose=verbose) # save all of the results from this analysis @@ -766,10 +853,28 @@ def analyze( # noqa: PLR0912, PLR0913, PLR0915 # confirm whether or not all of the checks passed # and then display the appropriate diagnostic message all_checks_passed = all(check_status_list) + end_time = time.time() + elapsed_time = end_time - start_time + if not all_checks_passed: output.console.print("\n:sweat: At least one check did not pass.") + if store_result: + # writes results of analyze into a markdown file + analysis_file_dir.write_text(analysis_result, encoding="utf-8") + output.console.print( + f"\n:sparkles: Results saved in: {os.path.abspath(analysis_file_dir)}\n" + ) sys.exit(constants.markers.Non_Zero_Exit) - output.console.print("\n:joy: All checks passed.") + output.console.print( + f"\n:joy: All checks passed. 
Elapsed Time: {elapsed_time} seconds" + ) + if store_result: + # writes results of analyze into a markdown file + result_path = os.path.abspath(analysis_file_dir) + analysis_file_dir.write_text(analysis_result, encoding="utf-8") + output.console.print(f"\n:sparkles: Results saved in: {result_path}\n") + if display: + database.display_results_frog_mouth(result_path, util.get_OS()) @cli.command() @@ -836,7 +941,7 @@ def integrate( # noqa: PLR0913 if combined_json_file_name: output.console.print(f"\n:sparkles: Saved the file '{combined_json_file_name}'") # "flatten" (i.e., "un-nest") the now-saved combined JSON file using flatterer - # create the SQLite3 database and then configure the database for use in datasett + # create the SQLite3 database and then configure the database for use in datasette combined_flattened_directory = filesystem.write_flattened_csv_and_database( combined_json_file_name, output_directory, diff --git a/chasten/util.py b/chasten/util.py index ec03cf92..70e94d64 100644 --- a/chasten/util.py +++ b/chasten/util.py @@ -2,6 +2,7 @@ import importlib.metadata import platform +import sys from chasten import constants @@ -25,6 +26,17 @@ def get_OS() -> str: return OpSystem +def executable_name(executable_name: str, OpSystem: str = "Linux") -> str: + """Get the executable directory depending on OS""" + exe_directory = "/bin/" + # Checks if the OS is windows and changed where to search if true + if OpSystem == "Windows": + exe_directory = "/Scripts/" + executable_name += ".exe" + virtual_env_location = sys.prefix + return virtual_env_location + exe_directory + executable_name + + def get_symbol_boolean(answer: bool) -> str: """Produce a symbol-formatted version of a boolean value of True or False.""" if answer: @@ -54,3 +66,17 @@ def get_chasten_version() -> str: def join_and_preserve(data, start, end): """Join and preserve lines inside of a list.""" return constants.markers.Newline.join(data[start:end]) + + +def total_amount_passed(analyze_result, count_total) -> tuple[int, int, float]: + """Calculate amount of checks passed in analyze""" + try: + # iterate through check sources to find checks passed + list_passed = [x.check.passed for x in analyze_result.sources] + # set variables to count true checks and total counts + count_true = list_passed.count(True) + # return tuple of checks passed, total checks, percentage of checks passed + return (count_true, count_total, (count_true / count_total) * 100) + # return exception when dividing by zero + except ZeroDivisionError: + return (0, 0, 0.0) diff --git a/poetry.lock b/poetry.lock index 3b1fbbf6..199e2006 100644 --- a/poetry.lock +++ b/poetry.lock @@ -681,6 +681,23 @@ ijson = "*" orjson = "*" pandas = "*" +[[package]] +name = "frogmouth" +version = "0.9.0" +description = "A Markdown document viewer for the terminal" +optional = false +python-versions = ">=3.8,<4.0" +files = [ + {file = "frogmouth-0.9.0-py3-none-any.whl", hash = "sha256:fd9eb4cd1c7aa42d4110d2b6c25022d558e5aceb6890412df345674fc4ada10f"}, + {file = "frogmouth-0.9.0.tar.gz", hash = "sha256:b0735b730babe2d37c45fc5947d5aa7065880d92c5d823a354066a036b1deb5d"}, +] + +[package.dependencies] +httpx = ">=0.24.1,<0.25.0" +textual = ">=0.32,<0.33" +typing-extensions = ">=4.5.0,<5.0.0" +xdg = ">=6.0.0,<7.0.0" + [[package]] name = "h11" version = "0.14.0" @@ -694,13 +711,13 @@ files = [ [[package]] name = "httpcore" -version = "0.18.0" +version = "0.17.3" description = "A minimal low-level HTTP client." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "httpcore-0.18.0-py3-none-any.whl", hash = "sha256:adc5398ee0a476567bf87467063ee63584a8bce86078bf748e48754f60202ced"}, - {file = "httpcore-0.18.0.tar.gz", hash = "sha256:13b5e5cd1dca1a6636a6aaea212b19f4f85cd88c366a2b82304181b769aab3c9"}, + {file = "httpcore-0.17.3-py3-none-any.whl", hash = "sha256:c2789b767ddddfa2a5782e3199b2b7f6894540b17b16ec26b2c4d8e103510b87"}, + {file = "httpcore-0.17.3.tar.gz", hash = "sha256:a6f30213335e34c1ade7be6ec7c47f19f50c56db36abef1a9dfa3815b1cb3888"}, ] [package.dependencies] @@ -715,18 +732,18 @@ socks = ["socksio (==1.*)"] [[package]] name = "httpx" -version = "0.25.0" +version = "0.24.1" description = "The next generation HTTP client." optional = false -python-versions = ">=3.8" +python-versions = ">=3.7" files = [ - {file = "httpx-0.25.0-py3-none-any.whl", hash = "sha256:181ea7f8ba3a82578be86ef4171554dd45fec26a02556a744db029a0a27b7100"}, - {file = "httpx-0.25.0.tar.gz", hash = "sha256:47ecda285389cb32bb2691cc6e069e3ab0205956f681c5b2ad2325719751d875"}, + {file = "httpx-0.24.1-py3-none-any.whl", hash = "sha256:06781eb9ac53cde990577af654bd990a4949de37a28bdb4a230d434f3a30b9bd"}, + {file = "httpx-0.24.1.tar.gz", hash = "sha256:5853a43053df830c20f8110c5e69fe44d035d850b2dfe795e196f00fdb774bdd"}, ] [package.dependencies] certifi = "*" -httpcore = ">=0.18.0,<0.19.0" +httpcore = ">=0.15.0,<0.18.0" idna = "*" sniffio = "*" @@ -2808,21 +2825,19 @@ doc = ["reno", "sphinx", "tornado (>=4.5)"] [[package]] name = "textual" -version = "0.38.1" +version = "0.32.0" description = "Modern Text User Interface framework" optional = false python-versions = ">=3.7,<4.0" files = [ - {file = "textual-0.38.1-py3-none-any.whl", hash = "sha256:8d38cbad6ac0b1320e52c7516e96b817e448d7c58d991269d3cf300108bbd191"}, - {file = "textual-0.38.1.tar.gz", hash = "sha256:504c934c3281217a29e7a95d498aacb7fbc629f6430895f7ac51ea7ba66e5d99"}, + {file = "textual-0.32.0-py3-none-any.whl", hash = "sha256:81fc68406c8806bc864e2f035874a868b4ff0cf466289dce5f7b31869949383b"}, + {file = "textual-0.32.0.tar.gz", hash = "sha256:f7b6683bc18faee6fd3c47cfbad43fbf8273c5fecc12230d52ce5ee089021327"}, ] [package.dependencies] importlib-metadata = ">=4.11.3" markdown-it-py = {version = ">=2.1.0", extras = ["linkify", "plugins"]} rich = ">=13.3.3" -tree-sitter = ">=0.20.1,<0.21.0" -tree_sitter_languages = {version = ">=1.7.0", markers = "python_version >= \"3.8\" and python_version < \"4.0\""} typing-extensions = ">=4.4.0,<5.0.0" [[package]] @@ -2850,132 +2865,6 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] -[[package]] -name = "tree-sitter" -version = "0.20.2" -description = "Python bindings for the Tree-Sitter parsing library" -optional = false -python-versions = ">=3.3" -files = [ - {file = "tree_sitter-0.20.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1a151ccf9233b0b84850422654247f68a4d78f548425c76520402ea6fb6cdb24"}, - {file = "tree_sitter-0.20.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52ca2738c3c4c660c83054ac3e44a49cbecb9f89dc26bb8e154d6ca288aa06b0"}, - {file = "tree_sitter-0.20.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a8d51478ea078da7cc6f626e9e36f131bbc5fac036cf38ea4b5b81632cbac37d"}, - {file = "tree_sitter-0.20.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0b2b59e1633efbf19cd2ed1ceb8d51b2c44a278153b1113998c70bc1570b750"}, - {file = 
"tree_sitter-0.20.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7f691c57d2a65d6e53e2f3574153c9cd0c157ff938b8d6f252edd5e619811403"}, - {file = "tree_sitter-0.20.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ba72a363387eebaff9a0b788f864fe47da425136cbd4cac6cd125051f043c296"}, - {file = "tree_sitter-0.20.2-cp310-cp310-win32.whl", hash = "sha256:55e33eb206446d5046d3b5fe36ab300840f5a8a844246adb0ccc68c55c30b722"}, - {file = "tree_sitter-0.20.2-cp310-cp310-win_amd64.whl", hash = "sha256:24ce9d14daba0a71a778417d9d61dd4038ca96981ddec19e1e8990881469321c"}, - {file = "tree_sitter-0.20.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:942dbfb8bc380f09b0e323d3884de07d19022930516f33b7503a6eb5f6e18979"}, - {file = "tree_sitter-0.20.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ee5651c11924d426f8d6858a40fd5090ae31574f81ef180bef2055282f43bf62"}, - {file = "tree_sitter-0.20.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8fb6982b480031628dad7f229c4c8d90b17d4c281ba97848d3b100666d7fa45f"}, - {file = "tree_sitter-0.20.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:067609c6c7cb6e5a6c4be50076a380fe52b6e8f0641ee9d0da33b24a5b972e82"}, - {file = "tree_sitter-0.20.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:849d7e6b66fe7ded08a633943b30e0ed807eee76104288e6c6841433f4a9651b"}, - {file = "tree_sitter-0.20.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e85689573797e49f86e2d7cf48b9dd23bc044c477df074a78546e666d6990a29"}, - {file = "tree_sitter-0.20.2-cp311-cp311-win32.whl", hash = "sha256:098906148e44ea391a91b019d584dd8d0ea1437af62a9744e280e93163fd35ca"}, - {file = "tree_sitter-0.20.2-cp311-cp311-win_amd64.whl", hash = "sha256:2753a87094b72fe7f02276b3948155618f53aa14e1ca20588f0eeed510f68512"}, - {file = "tree_sitter-0.20.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:5de192cb9e7b1c882d45418decb7899f1547f7056df756bcae186bbf4966d96e"}, - {file = "tree_sitter-0.20.2-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3a77e663293a73a97edbf2a2e05001de08933eb5d311a16bdc25b9b2fac54f3"}, - {file = "tree_sitter-0.20.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:415da4a70c56a003758537517fe9e60b8b0c5f70becde54cc8b8f3ba810adc70"}, - {file = "tree_sitter-0.20.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:707fb4d7a6123b8f9f2b005d61194077c3168c0372556e7418802280eddd4892"}, - {file = "tree_sitter-0.20.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:75fcbfb0a61ad64e7f787eb3f8fbf29b8e2b858dc011897ad039d838a06cee02"}, - {file = "tree_sitter-0.20.2-cp36-cp36m-win32.whl", hash = "sha256:622926530895d939fa6e1e2487e71a311c71d3b09f4c4f19301695ea866304a4"}, - {file = "tree_sitter-0.20.2-cp36-cp36m-win_amd64.whl", hash = "sha256:5c0712f031271d9bc462f1db7623d23703ed9fbcbaa6dc19ba535f58d6110774"}, - {file = "tree_sitter-0.20.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2dfdf680ecf5619447243c4c20e4040a7b5e7afca4e1569f03c814e86bfda248"}, - {file = "tree_sitter-0.20.2-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79650ee23a15559b69542c71ed9eb3297dce21932a7c5c148be384dd0f2cd49d"}, - {file = "tree_sitter-0.20.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d63059746b4b2f2f87dd19c208141c69452694aae32459b7a4ebca8539d13bf4"}, - {file = "tree_sitter-0.20.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9398d1e214d4915032cf68a678de7eb803f64d25ef04724d70b88db7bb7746e9"}, - {file = 
"tree_sitter-0.20.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b506fb2e2bd7a5a1603c644bbb90401fe488f86bbca39706addaa8d2bfc80815"}, - {file = "tree_sitter-0.20.2-cp37-cp37m-win32.whl", hash = "sha256:405e83804ba60ca1c3dbd258adbe0d7b0f1bdce948e5eec5587a2ebedcf930ba"}, - {file = "tree_sitter-0.20.2-cp37-cp37m-win_amd64.whl", hash = "sha256:a1e66d211c04144484e223922ac094a2367476e6f57000f986c5560dc5a83c6e"}, - {file = "tree_sitter-0.20.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f8adc325c74c042204ed47d095e0ec86f83de3c7ec4979645f86b58514f60297"}, - {file = "tree_sitter-0.20.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:beb49c861e1d111e0df119ecbfaa409e6413b8d91e8f56bcdb15f07fbc35594e"}, - {file = "tree_sitter-0.20.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e17ee83409b01fdd09021997b0c747be2f773bb2bb140ba6fb48b7e12fdd039a"}, - {file = "tree_sitter-0.20.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:475ab841647a0d1bc1266c8978279f8e4f7b9520b9a7336d532e5dfc8910214d"}, - {file = "tree_sitter-0.20.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:222350189675d9814966a5c88c6c1378a2ee2f3041c439a6f1d1ff2006f403aa"}, - {file = "tree_sitter-0.20.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:31ea52f0deee70f2cb00aff01e40aae325a34ebe1661de274c9107322fb95f54"}, - {file = "tree_sitter-0.20.2-cp38-cp38-win32.whl", hash = "sha256:cceaf7287137cbca707006624a4a8d4b5ccbfec025793fde84d90524c2bb0946"}, - {file = "tree_sitter-0.20.2-cp38-cp38-win_amd64.whl", hash = "sha256:25b9669911f21ec2b3727bb2f4dfeff6ddb6f81898c3e968d378a660e0d7f90e"}, - {file = "tree_sitter-0.20.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ce30a17f46a6b39a04a599dea88c127a19e3e1f43a2ad0ced71b5c032d585077"}, - {file = "tree_sitter-0.20.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e9576e8b2e663639527e01ab251b87f0bd370bfdd40515588689ebc424aec786"}, - {file = "tree_sitter-0.20.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d03731a498f624ce3536c821ef23b03d1ad569b3845b326a5b7149ef189d732c"}, - {file = "tree_sitter-0.20.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef0116ecb163573ebaa0fc04cc99c90bd94c0be5cc4d0a1ebeb102de9cc9a054"}, - {file = "tree_sitter-0.20.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0943b00d3700f253c3ee6a53a71b9a6ca46defd9c0a33edb07a9388e70dc3a9e"}, - {file = "tree_sitter-0.20.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8cb566b6f0b5457148cb8310a1ca3d764edf28e47fcccfe0b167861ecaa50c12"}, - {file = "tree_sitter-0.20.2-cp39-cp39-win32.whl", hash = "sha256:4544204a24c2b4d25d1731b0df83f7c819ce87c4f2538a19724b8753815ef388"}, - {file = "tree_sitter-0.20.2-cp39-cp39-win_amd64.whl", hash = "sha256:9517b204e471d6aa59ee2232f6220f315ed5336079034d5c861a24660d6511d6"}, - {file = "tree_sitter-0.20.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:84343678f58cb354d22ed14b627056ffb33c540cf16c35a83db4eeee8827b935"}, - {file = "tree_sitter-0.20.2-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:611a80171d8fa6833dd0c8b022714d2ea789de15a955ec42ec4fd5fcc1032edb"}, - {file = "tree_sitter-0.20.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bacecfb61694c95ccee462742b3fcea50ba1baf115c42e60adf52b549ef642ce"}, - {file = "tree_sitter-0.20.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:f344ae94a268479456f19712736cc7398de5822dc74cca7d39538c28085721d0"}, - {file = 
"tree_sitter-0.20.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:221784d7f326fe81ce7174ac5972800f58b9a7c5c48a03719cad9830c22e5a76"}, - {file = "tree_sitter-0.20.2-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64210ed8d2a1b7e2951f6576aa0cb7be31ad06d87da26c52961318fc54c7fe77"}, - {file = "tree_sitter-0.20.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2634ac73b39ceacfa431d6d95692eae7465977fa0b9e9f7ae6cb445991e829a5"}, - {file = "tree_sitter-0.20.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:71663a0e8230dae99d9c55e6895bd2c9e42534ec861b255775f704ae2db70c1d"}, - {file = "tree_sitter-0.20.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:32c3e0f30b45a58d36bf6a0ec982ca3eaa23c7f924628da499b7ad22a8abad71"}, - {file = "tree_sitter-0.20.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b02e4ab2158c25f6f520c93318d562da58fa4ba53e1dbd434be008f48104980"}, - {file = "tree_sitter-0.20.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10e567eb6961a1e86aebbe26a9ca07d324f8529bca90937a924f8aa0ea4dc127"}, - {file = "tree_sitter-0.20.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:63f8e8e69f5f25c2b565449e1b8a2aa7b6338b4f37c8658c5fbdec04858c30be"}, - {file = "tree_sitter-0.20.2.tar.gz", hash = "sha256:0a6c06abaa55de174241a476b536173bba28241d2ea85d198d33aa8bf009f028"}, -] - -[[package]] -name = "tree-sitter-languages" -version = "1.7.0" -description = "Binary Python wheels for all tree sitter languages." -optional = false -python-versions = "*" -files = [ - {file = "tree_sitter_languages-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:fd8b856c224a74c395ed9495761c3ef8ba86014dbf6037d73634436ae683c808"}, - {file = "tree_sitter_languages-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:277d1bec6e101a26a4445cd7cb1eb8f8cf5a9bbad1ca80692bfae1af63568272"}, - {file = "tree_sitter_languages-1.7.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0473bd896799ccc87f428766813ddedd3506cad8430dbe863b663c81d7387680"}, - {file = "tree_sitter_languages-1.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb6799419bc7e3029112f2a3f8b77b6c299f94f03bb70e5c31a437b3180486be"}, - {file = "tree_sitter_languages-1.7.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e5b705c8ce6ef47fc461484878956ecd42a67cbeb0a17e323b86a4439a8fdc3d"}, - {file = "tree_sitter_languages-1.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:28a732be6fced2f70184c1b34f64961e3b6259fe6d5f7540c91028c2a43a7109"}, - {file = "tree_sitter_languages-1.7.0-cp310-cp310-win32.whl", hash = "sha256:f5cdb1ec88f0b8c617330c953555a20cc7e96ca6b1f5c68ab6db347e869cfeeb"}, - {file = "tree_sitter_languages-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:26cb344a75798fce1a73b690504d8e7789f6ba25a178efcd203444d7868caf38"}, - {file = "tree_sitter_languages-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:433b56cb3dca02b30f21c596f431a2cff90905326be1f8913c3515acb984b21e"}, - {file = "tree_sitter_languages-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96686390e1a01af44aedef7b33d6be82de3cf674a98a5c7b417e540e6afa62cc"}, - {file = "tree_sitter_languages-1.7.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25a4b6d559fbd76c6ec1b73cf03d09f53aaa5a1b61078a3f518b162866d9d97e"}, - {file = "tree_sitter_languages-1.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e504f199c7a4c8b1b1efb05a063450aa23234feea6fa6c06f4077f7248ea9c98"}, - {file = "tree_sitter_languages-1.7.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6b29856e9314b5f68f05dfa45e6674f47535229dda32294ba6d129077a97759c"}, - {file = "tree_sitter_languages-1.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:786fdaf3d2120eef9384b0f22d7e2e42a561073ba753c7b438e90a1e7b351650"}, - {file = "tree_sitter_languages-1.7.0-cp311-cp311-win32.whl", hash = "sha256:a55a7007056d0927b78481b437d79ea0487cc991c7f9c19d67adcceac3d47f53"}, - {file = "tree_sitter_languages-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:4b01d3bdf7ce2aeee4d0df62071a0ca91e618a29845686a5bd714d93c5ef3b36"}, - {file = "tree_sitter_languages-1.7.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9b603f1ad01bfb9d178f965125e2528cb7da9666d180f4a9a1acfaedbf5862ea"}, - {file = "tree_sitter_languages-1.7.0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70610aa26dd985d2fb9eb07ea8eacc3ceb0cc9c2e91416f51305120cfd919e28"}, - {file = "tree_sitter_languages-1.7.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0444ebc8bdb7dc0d66a816050cfd52376c4e62a94a9c54fde90b29acf3e4bab1"}, - {file = "tree_sitter_languages-1.7.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:7eeb5a3307ff1c0994ffff5ea37ec656a716a728b8c9359374104da521a76ded"}, - {file = "tree_sitter_languages-1.7.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:6c319cef16f2df667f1c165fe4eee160f2b51a0c4b61db1e70de2ab86420ca9a"}, - {file = "tree_sitter_languages-1.7.0-cp36-cp36m-win32.whl", hash = "sha256:b216650126d95d494f927393903e836a7ef5f0c4db0834f3a0b576f97c13abaf"}, - {file = "tree_sitter_languages-1.7.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6c96e5785d164a205962a10256808b3d12dccee9827ec88a46899063a2a2d28"}, - {file = "tree_sitter_languages-1.7.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:adafeabbd8d47b80122fad18bb61c25ed3da04f5347b7d774b53826accb27b7a"}, - {file = "tree_sitter_languages-1.7.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50e2bc5d2da770ecd5af94f9d716faa4764f890fd61bc0a488e9269653d9fb71"}, - {file = "tree_sitter_languages-1.7.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac773097cff7de6cf265c5be9990b4c6690161452da1d9fc41021d4bf7e8c73a"}, - {file = "tree_sitter_languages-1.7.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b233bfc48cf0f16436200afc7d7643cd87101c321de25b919b61f21f1693aa52"}, - {file = "tree_sitter_languages-1.7.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:eab3caedf50467045ed5cab776a57b494332616376d387c6600fd7ea4f5483cf"}, - {file = "tree_sitter_languages-1.7.0-cp37-cp37m-win32.whl", hash = "sha256:d533f743a22f5696494d3a5a60adb4cfbef63d58b8b5622993d93d6d0a602444"}, - {file = "tree_sitter_languages-1.7.0-cp37-cp37m-win_amd64.whl", hash = "sha256:aab96f64be30c9f73d6dc958ec22bb1a9fe70e90b2d2a3d233d537b347cea729"}, - {file = "tree_sitter_languages-1.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1bf89d771621e28847036b377f865f947e555a6654356d21beab738bb2531a69"}, - {file = "tree_sitter_languages-1.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b2f171089ec3c4f1de275edc8f0722e1e3dc7a54e83107098315ea2f0952cfcd"}, - {file = "tree_sitter_languages-1.7.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a091577d3a8454c40f813ee2834314c73cc504522f70f9e33d7c2268d33973f9"}, - {file = "tree_sitter_languages-1.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8287efa87d080b340b583a6e81266cc3d8266deb61b8f3312649a9d1562e665a"}, - {file = "tree_sitter_languages-1.7.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9c5080c06a2df7a59c69d2422a6ae83a5e37e92d57c4bd5e572d0eb5226ab3b0"}, - {file = "tree_sitter_languages-1.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ca8f629cfb406a2f9b9f8a3a5c804d4d1ba4cdca41cccba63f51fc1bab13e5de"}, - {file = "tree_sitter_languages-1.7.0-cp38-cp38-win32.whl", hash = "sha256:fd3561b37a99c9d501719819a8736529ae3a6d597128c15be432d1855f3cb0d9"}, - {file = "tree_sitter_languages-1.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:377ad60f7a7bf27315676c4fa84cc766aa0019c1e556083763136ed951e934c0"}, - {file = "tree_sitter_languages-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a1dc71b68e48f58cd5b6a9ab7a541714201815629a6554a969cfc579a6ee6e53"}, - {file = "tree_sitter_languages-1.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fb1521367b14c275bef70997ea90526e7049f840ba1bbd3ef56c72f5b15596e9"}, - {file = "tree_sitter_languages-1.7.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f73651f7e78371dc3d455e8aba510cc6fb9e1ac1d648c3334157950781eb295"}, - {file = "tree_sitter_languages-1.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:049b0dd63be721fe3f9642a2b5a044bea2852de2b35818467996242ae4b7f01f"}, - {file = "tree_sitter_languages-1.7.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c428a8e1f5ecc4eb5c79abff3eb2881123446cde16fd1d8866d527470a6fdd2f"}, - {file = "tree_sitter_languages-1.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:40fb3fc11ff90caf65b4713feeb6c4852e5d2a04ef8ae6a2ac734a702a6a6c7e"}, - {file = "tree_sitter_languages-1.7.0-cp39-cp39-win32.whl", hash = "sha256:f28e9904833b7a909f8227c4560401049bd3310cebe3e0a884d9461f783b9af2"}, - {file = "tree_sitter_languages-1.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ea47ee390ec2e1c9bf96d7b418775263766021a834910c9f2d578f95a3e27d0f"}, -] - -[package.dependencies] -tree-sitter = "*" - [[package]] name = "trogon" version = "0.5.0" @@ -3143,6 +3032,17 @@ MarkupSafe = ">=2.1.1" [package.extras] watchdog = ["watchdog"] +[[package]] +name = "xdg" +version = "6.0.0" +description = "Variables defined by the XDG Base Directory Specification" +optional = false +python-versions = ">=3.7,<4.0" +files = [ + {file = "xdg-6.0.0-py3-none-any.whl", hash = "sha256:df3510755b4395157fc04fc3b02467c777f3b3ca383257397f09ab0d4c16f936"}, + {file = "xdg-6.0.0.tar.gz", hash = "sha256:24278094f2d45e846d1eb28a2ebb92d7b67fc0cab5249ee3ce88c95f649a1c92"}, +] + [[package]] name = "zipp" version = "3.17.0" @@ -3161,4 +3061,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "c1e0fbfc2f5338b34fbee114e1e7365c24060c59b6e1c8420d9421950f4fc96d" +content-hash = "06c3ba10769c0bdc8b2eedf082e606b84e9af030e683d3284b2f096d46996a1b" diff --git a/pyproject.toml b/pyproject.toml index 08dfbc7f..fae05f81 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -14,6 +14,7 @@ rich = "^13.4.2" typer = {extras = ["all"], version = "^0.9.0"} pyastgrep = "^1.2.2" trogon = "^0.5.0" +frogmouth = "^0.9.0" pydantic = "^2.0.3" platformdirs = "^3.8.1" pyyaml = "^6.0" diff --git a/scripts/extract_coverage.py b/scripts/extract_coverage.py index 680c65ca..53d9be36 100644 --- a/scripts/extract_coverage.py +++ b/scripts/extract_coverage.py @@ -12,5 +12,5 @@ filename = "chasten/util.py" covered_lines = set(data.lines(filename)) # type: ignore -print(f"Covered 
lines in {filename}:")  # noqa
+print(f"Covered lines in {filename}:")  # noqa
 print(covered_lines)  # noqa
diff --git a/tests/test_database.py b/tests/test_database.py
deleted file mode 100644
index dd1ad2c3..00000000
--- a/tests/test_database.py
+++ /dev/null
@@ -1,9 +0,0 @@
-"""Pytest test suite for the database module."""
-
-from chasten import database, filesystem, util
-
-
-def test_executable_name() -> None:
-    assert filesystem.can_find_executable(
-        database.executable_name(OpSystem=util.get_OS())
-    )
diff --git a/tests/test_main.py b/tests/test_main.py
index 0b0783c0..07fb9ca8 100644
--- a/tests/test_main.py
+++ b/tests/test_main.py
@@ -310,3 +310,119 @@ def test_fuzz_cli_analyze_single_directory(cwd, directory):
         ],
     )
     assert result.exit_code == 0
+
+
+def test_analyze_store_results_file_does_not_exist(cwd, tmpdir):
+    """Make sure analyze does not crash when using markdown storage."""
+    tmp_dir = Path(tmpdir)
+    project_name = "testing"
+    # create a reference to the internal
+    # .chasten directory that supports testing
+    configuration_directory = str(cwd) + "/.chasten"
+    result = runner.invoke(
+        main.cli,
+        [
+            "analyze",
+            "--search-path",
+            cwd,
+            project_name,
+            "--config",
+            configuration_directory,
+            "--markdown-storage",
+            tmp_dir,
+        ],
+    )
+    assert result.exit_code == 0
+    assert "āœØ Results saved in:" in result.output
+
+
+def test_analyze_store_results_file_exists_no_force(cwd, tmpdir):
+    """Make sure analyze acts accordingly when the file exists and there is no --force."""
+    tmp_dir = Path(tmpdir)
+    # create a path for the markdown file inside the temporary directory
+    file = tmp_dir / "analysis.md"
+    # creates the file if it does not exist
+    file.touch()
+    # makes sure the file exists
+    assert file.exists()
+    project_name = "testing"
+    # create a reference to the internal
+    # .chasten directory that supports testing
+    configuration_directory = str(cwd) + "/.chasten"
+    # runs the CLI with the specified commands
+    result = runner.invoke(
+        main.cli,
+        [
+            "analyze",
+            "--search-path",
+            cwd,
+            project_name,
+            "--config",
+            configuration_directory,
+            "--markdown-storage",
+            tmp_dir,
+        ],
+    )
+    # assert that the command exits with an error and that the proper message is displayed
+    assert result.exit_code == 1
+    assert (
+        "File already exists: use --force to recreate markdown directory."
+        in result.output
+    )
+
+
+def test_analyze_store_results_file_exists_force(cwd, tmpdir):
+    tmp_dir = Path(tmpdir)
+    # create a path for the markdown file inside the temporary directory
+    file = tmp_dir / "analysis.md"
+    # creates the file if it does not exist
+    file.touch()
+    # makes sure the file exists
+    assert file.exists()
+    project_name = "testing"
+    # create a reference to the internal
+    # .chasten directory that supports testing
+    configuration_directory = str(cwd) + "/.chasten"
+    # runs the CLI with the specified commands
+    result = runner.invoke(
+        main.cli,
+        [
+            "analyze",
+            "--search-path",
+            cwd,
+            project_name,
+            "--config",
+            configuration_directory,
+            "--markdown-storage",
+            tmp_dir,
+            "--force",
+        ],
+    )
+    # assert that the command succeeds and that the results message is displayed
+    assert result.exit_code == 0
+    assert "āœØ Results saved in:" in result.output
+
+
+@given(directory=strategies.builds(Path))
+@settings(deadline=None, suppress_health_check=[HealthCheck.function_scoped_fixture])
+@pytest.mark.fuzz
+def test_analyze_store_results_valid_path(directory, cwd):
+    project_name = "testing"
+    # create a reference to the internal
+    # .chasten directory that supports testing
+    configuration_directory = str(cwd) + "/.chasten"
+    result = runner.invoke(
+        main.cli,
+        [
+            "analyze",
+            "--search-path",
+            cwd,
+            project_name,
+            "--config",
+            configuration_directory,
+            "--markdown-storage",
+            directory,
+            "--force",
+        ],
+    )
+    assert result.exit_code == 0
diff --git a/tests/test_util.py b/tests/test_util.py
index d323fa5d..86e75d62 100644
--- a/tests/test_util.py
+++ b/tests/test_util.py
@@ -1,9 +1,11 @@
 """Pytest test suite for the util module."""
 
+import shutil
+
 import pytest
 from hypothesis import given, strategies
 
-from chasten import util
+from chasten import constants, util
 
 
 def test_human_readable_boolean() -> None:
@@ -28,3 +30,15 @@ def test_fuzz_human_readable_boolean_correct_string(answer: bool) -> None:
         assert str_answer == "Yes"
     else:
         assert str_answer == "No"
+
+
+OpSystem = util.get_OS()
+datasette_exec = constants.datasette.Datasette_Executable
+
+
+def test_executable_name() -> None:
+    """Test if the executable name helper builds the correct file path."""
+    # makes sure the datasette executable is where expected
+    assert shutil.which(util.executable_name(datasette_exec, OpSystem))
+    # makes sure the frogmouth executable is where expected
+    assert shutil.which(util.executable_name("frogmouth", OpSystem))
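
For reference, here is a minimal sketch of an invocation that exercises the options introduced in this diff (`--markdown-storage`, `--display`, `--force`, and `--xpath-version`). The project name and the bracketed paths are placeholders, the markdown storage directory must already exist (the option is declared with `exists=True`), and `--display` assumes that `frogmouth` is installed in the active virtual environment:

```shell
# placeholder paths; substitute your own configuration, source, and results directories
chasten analyze lazytracker \
    --config <path/to/config> \
    --search-path <path/to/source> \
    --markdown-storage <path/to/existing/results/dir> \
    --xpath-version 2.0 \
    --display \
    --force \
    --verbose
```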