From f9324f8530244223b9cd2dce1da802b09e92e5d5 Mon Sep 17 00:00:00 2001
From: Prashanna Dahal
Date: Fri, 13 Feb 2026 13:39:29 +0530
Subject: [PATCH 1/9] Add RockspecHandler base and parser

Implement base handler class and Lua rockspec parser using luaparser
for AST-based dependency extraction.

Signed-off-by: Prashanna Dahal
---
 src/packagedcode/rockspec.py | 478 +++++++++++++++++++++++++++++++++++
 1 file changed, 478 insertions(+)
 create mode 100644 src/packagedcode/rockspec.py

diff --git a/src/packagedcode/rockspec.py b/src/packagedcode/rockspec.py
new file mode 100644
index 0000000000..ce80fde8f7
--- /dev/null
+++ b/src/packagedcode/rockspec.py
@@ -0,0 +1,478 @@
+import os
+import sys
+import logging
+import re
+import traceback
+from packageurl import PackageURL
+
+from packagedcode import models
+
+# Debug configuration - set via environment variables
+SCANCODE_DEBUG_PACKAGE = os.environ.get('SCANCODE_DEBUG_PACKAGE', False)
+TRACE = SCANCODE_DEBUG_PACKAGE
+
+
+def logger_debug(*args):
+    """Dummy function that does nothing by default."""
+    pass
+
+
+logger = logging.getLogger(__name__)
+
+# Configure logging when debug is enabled
+if TRACE:
+    logging.basicConfig(stream=sys.stdout)
+    logger.setLevel(logging.DEBUG)
+
+    def logger_debug(*args):
+        """Redefine to actually log debug messages."""
+        return logger.debug(
+            ' '.join(isinstance(a, str) and a or repr(a) for a in args)
+        )
+
+
+class RockspecHandler(models.DatafileHandler):
+    datasource_id = 'luarocks_rockspec'
+    path_patterns = ('*.rockspec',)
+    default_package_type = 'luarocks'
+    default_primary_language = 'Lua'
+    description = 'LuaRocks rockspec file'
+    documentation_url = 'https://github.com/luarocks/luarocks/blob/main/docs/rockspec_format.md'
+
+    @classmethod
+    def parse(cls, location, package_only=False):
+        """
+        Parse a rockspec file and return a PackageData object.
+        """
+        pass
+
+
+class ParseError:
+    """Structured error representation."""
+
+    ERROR_MANDATORY_FIELD_MISSING = 'mandatory_field_missing'
+    ERROR_PARSE_FAILED = 'parse_failed'
+    ERROR_TABLE_EXTRACTION = 'table_extraction_failed'
+
+    def __init__(self, error_type, field, message):
+        self.error_type = error_type
+        self.field = field
+        self.message = message
+
+    def __str__(self):
+        return self.message
+
+    def __repr__(self):
+        return f"ParseError({self.error_type}, {self.field}: {self.message})"
+
+
+class RockspecParser:
+    """Parse LuaRocks rockspec files using Lua AST."""
+
+    def __init__(self, rockspec_path):
+        self.rockspec_path = rockspec_path
+        self.ast_tree = None
+        self.errors = []
+
+    def parse(self):
+        """Main parsing orchestration.
+        Reads file, parses AST, extracts all fields."""
+        try:
+            code = self._read_file()
+            self.ast_tree = self._parse_lua(code)
+
+            data = {
+                'package': self._extract_package(),
+                'version': self._extract_version(),
+                'rockspec_format': self._extract_rockspec_format(),
+                'supported_platforms': self._extract_supported_platforms(),
+                'vcs_url': self._extract_source_url(),
+                'description': self._extract_description(),
+                'license': self._extract_license(),
+                'homepage_url': self._extract_homepage(),
+                'dependencies': self._extract_dependencies(),
+            }
+            return data
+        except Exception as e:
+            self.errors.append(ParseError(ParseError.ERROR_PARSE_FAILED, 'parse', str(e)))
+            traceback.print_exc()
+            return {}
+
+    def _read_file(self):
+        """Read rockspec file and return content."""
+        try:
+            with open(self.rockspec_path, 'r') as f:
+                return f.read()
+        except FileNotFoundError:
+            raise FileNotFoundError(f"File not found: {self.rockspec_path}")
+        except IOError as e:
+            raise IOError(f"Error reading file: {e}")
+
+    def _parse_lua(self, code):
+        """Parse Lua code to AST."""
+        try:
+            return ast.parse(code)
+        except Exception as e:
+            raise RuntimeError(f"Lua parse error: {e}")
+
+    def _find_assignment(self, var_name):
+        """Find assignment node for a variable, return (target_node, value_node)."""
+        if not self.ast_tree:
+            return None
+
+        for node in ast.walk(self.ast_tree):
+            # Skip nodes that aren't assignments
+            if not hasattr(node, 'targets') or not hasattr(node, 'values'):  # type: ignore
+                continue
+
+            # Skip if no targets in this assignment
+            if not node.targets:  # type: ignore
+                continue
+
+            # Check each target to find our variable
+            for idx, target in enumerate(node.targets):  # type: ignore
+                # Skip if target doesn't have an id attribute
+                if not hasattr(target, 'id'):
+                    continue
+
+                # Found the variable we're looking for
+                if target.id == var_name:  # type: ignore
+                    # Get the corresponding value (or first value if not enough values)
+                    value = node.values[idx] if idx < len(node.values) else (node.values[0] if node.values else None)  # type: ignore
+                    return (target, value)
+
+        return None
+
+    def _extract_string_value(self, node):
+        """Extract string value from String node."""
+        if not node or type(node).__name__ != 'String':
+            return None
+
+        s_val = node.s if hasattr(node, 's') else None
+        if isinstance(s_val, bytes):
+            return s_val.decode('utf-8')
+        return str(s_val) if s_val else None
+
+    def _extract_table_values(self, table_node):
+        """Extract key-value pairs from Table node."""
+        result = {}
+
+        if not table_node or type(table_node).__name__ != 'Table':
+            return result
+
+        if not hasattr(table_node, 'fields'):
+            return result
+
+        try:
+            for field in table_node.fields:
+                field_type = type(field).__name__
+
+                # Only process Field type nodes because they represent key-value pairs or array entries
+                if field_type != 'Field':
+                    continue
+
+                key_node = field.key if hasattr(field, 'key') else None
+                value_node = field.value if hasattr(field, 'value') else None
+
+                # Skip fields without values
+                if not value_node:
+                    continue
+
+                # Extract value (works for both hash-style and array-style)
+                extracted_value = self._extract_value(value_node)
+                if extracted_value is None:
+                    continue
+
+                # Hash-style field: {key = value}
+                if key_node:
+                    key = self._extract_key(key_node)
+                    if key is not None:
+                        result[key] = extracted_value
+                # Array-style field: {value}
+                else:
+                    result[len(result)] = extracted_value
+
+        except Exception as e:
+            error_msg = f'Error extracting table values: {e}'
+            self.errors.append(ParseError(ParseError.ERROR_TABLE_EXTRACTION, 'table', error_msg))
+
+        return result
+
+    def _extract_key(self, key_node):
+        """Extract key from field key node."""
+        if not key_node:
+            return None
+
+        node_type = type(key_node).__name__
+
+        if node_type == 'String':
+            return self._extract_string_value(key_node)
+        elif node_type == 'Name' or node_type == 'Id':
+            return key_node.id if hasattr(key_node, 'id') else None
+        elif node_type == 'Number':
+            n_val = key_node.n if hasattr(key_node, 'n') else None
+            return str(n_val) if n_val is not None else None
+
+        return None
+
+    def _extract_value(self, node):
+        """Extract value from any AST node."""
+        if node is None:
+            return None
+
+        node_type = type(node).__name__
+
+        # Handle each node type
+        if node_type == 'String':
+            return self._extract_string_value(node)
+
+        elif node_type == 'Number':
+            number_value = node.n if hasattr(node, 'n') else None
+            return number_value
+
+        elif node_type == 'Boolean':
+            bool_value = node.value if hasattr(node, 'value') else None
+            return bool_value
+
+        elif node_type == 'Table':
+            return self._extract_table_values(node)
+
+        # special concat case found in some rockspec files in the wild
+        elif node_type == 'Concat':
+            return self._extract_concat(node)
+
+        elif node_type == 'Name':
+            var_name = node.id if hasattr(node, 'id') else None
+            if var_name:
+                return self._get_variable_value(var_name)
+
+        # Unknown node type
+        return None
+
+    def _get_variable_value(self, var_name):
+        """Look up a variable and return its value."""
+        assignment = self._find_assignment(var_name)
+        if not assignment:
+            return None
+
+        _, value = assignment
+        return self._extract_value(value)
+
+    def _extract_concat(self, concat_node):
+        """
+        Extract value from Concat node (string concatenation).
+        Recursively processes: left .. right
+        """
+        if not concat_node or type(concat_node).__name__ != 'Concat':
+            return None
+
+        left_node = concat_node.left if hasattr(concat_node, 'left') else None
+        right_node = concat_node.right if hasattr(concat_node, 'right') else None
+
+        # Recursively extract values from both sides
+        left_value = self._extract_value(left_node)
+        right_value = self._extract_value(right_node)
+
+        # Build result from available values
+        has_left = left_value is not None
+        has_right = right_value is not None
+
+        if has_left and has_right:
+            return str(left_value) + str(right_value)
+        elif has_left:
+            return str(left_value)
+        elif has_right:
+            return str(right_value)
+        else:
+            return None
+
+    def _extract_package(self):
+        """Extract package name (mandatory)."""
+        assignment = self._find_assignment('package')
+        if not assignment:
+            self.errors.append(ParseError(ParseError.ERROR_MANDATORY_FIELD_MISSING, 'package', 'Missing mandatory field: package'))
+            return None
+
+        _, value = assignment
+        result = self._extract_value(value)
+        return str(result) if result else None
+
+    def _extract_version(self):
+        """Extract version (mandatory)."""
+        assignment = self._find_assignment('version')
+        if not assignment:
+            self.errors.append(ParseError(ParseError.ERROR_MANDATORY_FIELD_MISSING, 'version', 'Missing mandatory field: version'))
+            return None
+
+        _, value = assignment
+        result = self._extract_value(value)
+        return str(result) if result else None
+
+    def _extract_rockspec_format(self):
+        """Extract rockspec_format (optional)."""
+        assignment = self._find_assignment('rockspec_format')
+        if not assignment:
+            return None
+
+        _, value = assignment
+        result = self._extract_value(value)
+        return str(result) if result else None
+
+    def _extract_supported_platforms(self):
+        """Extract supported_platforms as list (optional table)."""
+        assignment = self._find_assignment('supported_platforms')
+        if not assignment:
+            return []
+
+        _, platform_table_node = assignment
+        platform_dict = self._extract_table_values(platform_table_node)
+
+        # Sort platforms by numeric index order
+        return self._sort_by_numeric_index(platform_dict)
+
+    def _extract_source_url(self):
+        """Extract VCS URL from source table (url is mandatory)."""
+        assignment = self._find_assignment('source')
+        if not assignment:
+            self.errors.append(ParseError(ParseError.ERROR_MANDATORY_FIELD_MISSING, 'source', 'Missing mandatory field: source'))
+            return None
+
+        _, value = assignment
+        source_table = self._extract_table_values(value)
+
+        source_url = source_table.get('url')
+        if not source_url:
+            self.errors.append(ParseError(ParseError.ERROR_MANDATORY_FIELD_MISSING, 'source.url', 'Missing mandatory field: source.url'))
+            return None
+
+        return str(source_url)
+
+    def _extract_description(self):
+        """Extract description summary from description table (optional)."""
+        assignment = self._find_assignment('description')
+        if not assignment:
+            return None
+
+        _, value = assignment
+        desc_table = self._extract_table_values(value)
+
+        summary = desc_table.get('summary')
+        return str(summary) if summary else None
+
+    def _extract_license(self):
+        """Extract license from description table (optional)."""
+        assignment = self._find_assignment('description')
+        if not assignment:
+            return None
+
+        _, value = assignment
+        desc_table = self._extract_table_values(value)
+
+        license_val = desc_table.get('license')
+        return str(license_val) if license_val else None
+
+    def _extract_homepage(self):
+        """Extract homepage URL from description table (optional)."""
+        assignment = self._find_assignment('description')
+        if not assignment:
+            return None
+
+        _, value = assignment
+        desc_table = self._extract_table_values(value)
+
+        homepage = desc_table.get('homepage')
+        return str(homepage) if homepage else None
+
+    def _extract_dependencies(self):
+        """Extract dependencies as list of parsed dicts (optional table)."""
+        assignment = self._find_assignment('dependencies')
+        if not assignment:
+            return []
+
+        _, dependency_table_node = assignment
+        dependency_strings = self._extract_table_values(dependency_table_node)
+
+        if not dependency_strings:
+            return []
+
+        sorted_strings = self._sort_by_numeric_index(dependency_strings)
+
+        return [
+            parsed
+            for parsed in (self.parse_dependency(dep_string) for dep_string in sorted_strings)
+            if parsed is not None
+        ]
+
+    def _sort_by_numeric_index(self, table_dict):
+        """Sort a table dict by numeric keys and return values as strings."""
+        try:
+            # Sort by numeric key index
+            sorted_items = sorted(
+                table_dict.items(),
+                key=lambda x: self._numeric_key_value(x[0])
+            )
+            return [str(value) for _, value in sorted_items]
+        except Exception:
+            # Fallback: return values in dict order
+            return [str(v) for v in table_dict.values()]
+
+    def _numeric_key_value(self, key):
+        """Convert key to numeric value for sorting. Non-numeric keys sort to end."""
+        if isinstance(key, int):
+            return key
+        if isinstance(key, str) and key.isdigit():
+            return int(key)
+        return float('inf')  # Non-numeric keys sort to the end
+
+    def parse_dependency(self, dep_string):
+        """
+        Parse a Lua dependency string into name and version spec.
+
+        LuaRocks dependency format: "package_name [operator version]"
+        Examples:
+            "inspect == 3.1.3"
+            "luasec == 1.3.1"
+            "binaryheap >= 0.4"
+            "somedep" (no version)
+
+        Returns dict with keys:
+        - name: Package name
+        - version_number: Clean version number (e.g. "3.1.3") or None
+        - version_spec: Full version specification with operator (e.g. "== 3.1.3") or None
+        - raw: Original input string
+        """
+        if not dep_string:
+            return None
+
+        dep_string = str(dep_string).strip()
+        pattern = r'([a-zA-Z0-9_-]+)\s*(?:([>=<~=]+)\s*)?(.+)?'
+        match = re.match(pattern, dep_string)
+
+        if not match:
+            return None
+
+        name = match.group(1)
+        operator = match.group(2)
+        version_raw = match.group(3)
+
+        version_number = None
+        version_spec = None
+
+        if version_raw:
+            version_raw = version_raw.strip()
+            version_match = re.search(r'([0-9][0-9.]*)', version_raw)
+            if version_match:
+                version_number = version_match.group(1)
+                if operator:
+                    version_spec = operator + ' ' + version_number
+                else:
+                    version_spec = version_number
+
+        return {
+            'name': name,
+            'version_number': version_number,
+            'version_spec': version_spec,
+            'raw': dep_string
+        }
+
+

From af157597fd3866d70c38ec169c07064512d88c4b Mon Sep 17 00:00:00 2001
From: Prashanna Dahal
Date: Fri, 13 Feb 2026 13:42:41 +0530
Subject: [PATCH 2/9] Implement parser logic in handler

Extract package metadata and dependencies from rockspec files with
version specification parsing.

Signed-off-by: Prashanna Dahal
---
 src/packagedcode/rockspec.py | 141 ++++++++++++++++++++++++++++++++++-
 1 file changed, 140 insertions(+), 1 deletion(-)

diff --git a/src/packagedcode/rockspec.py b/src/packagedcode/rockspec.py
index ce80fde8f7..121e05fad9 100644
--- a/src/packagedcode/rockspec.py
+++ b/src/packagedcode/rockspec.py
@@ -5,6 +5,7 @@
 import traceback
 from packageurl import PackageURL
+from luaparser import ast
 
 from packagedcode import models
 
 # Debug configuration - set via environment variables
@@ -44,7 +45,145 @@ def parse(cls, location, package_only=False):
         """
         Parse a rockspec file and return a PackageData object.
         """
-        pass
+        parser = RockspecParser(location)
+        parsed_data = parser.parse()
+
+        # mandatory fields in rockspec files
+        name = parsed_data.get('package')
+        version = parsed_data.get('version')
+        vcs_url = parsed_data.get('vcs_url')
+
+        # Extract optional fields
+        description = parsed_data.get('description')
+        homepage_url = parsed_data.get('homepage_url')
+        extracted_license_statement = parsed_data.get('license')
+
+        parsed_dependencies = parsed_data.get('dependencies') or []
+
+        if parsed_dependencies:
+            dependencies = cls._build_dependent_packages(parsed_dependencies)
+        else:
+            dependencies = []
+
+        extra_data = cls._build_extra_data(parsed_data)
+
+        package_data = dict(
+            datasource_id=cls.datasource_id,
+            type=cls.default_package_type,
+            name=name,
+            version=version,
+            primary_language=cls.default_primary_language,
+            description=description,
+            homepage_url=homepage_url,
+            vcs_url=vcs_url,
+            extracted_license_statement=extracted_license_statement,
+            dependencies=dependencies,
+            extra_data=extra_data,
+        )
+
+        yield models.PackageData.from_data(package_data, package_only)
+
+    @classmethod
+    def _build_dependent_packages(cls, parsed_dependencies):
+        """
+        Convert parsed dependency dicts into DependentPackage objects.
+
+        Args:
+            parsed_dependencies: List of dicts with 'name' and 'version_spec' keys
+                (already parsed by RockspecParser)
+
+        Returns:
+            List of DependentPackage objects
+        """
+        dependencies = []
+
+        for dep_dict in parsed_dependencies:
+            dep_obj = cls._create_dependent_package(dep_dict)
+            dependencies.append(dep_obj)
+
+        return dependencies
+
+    @classmethod
+    def _create_dependent_package(cls, dep_components):
+        """
+        Create a DependentPackage object from parsed dependency components.
+
+        Args:
+            dep_components: Dict with 'name', 'version_number', and 'version_spec'
+                (already parsed by RockspecParser.parse_dependency)
+
+        Returns:
+            DependentPackage object
+        """
+        name = dep_components.get('name')
+        version_number = dep_components.get('version_number')
+        version_spec = dep_components.get('version_spec')
+
+        purl_str = cls._create_purl_string(name, version_number)
+        # Determine if pinned (exact version with == operator)
+        is_pinned = bool(version_spec and '==' in str(version_spec))
+
+        return models.DependentPackage(
+            purl=purl_str,
+            extracted_requirement=version_spec,
+            scope='dependencies',
+            is_runtime=True,
+            is_optional=False,
+            is_pinned=is_pinned,
+            is_direct=True,
+        )
+
+    @classmethod
+    def _build_extra_data(cls, parsed_data):
+        """
+        Build extra_data dict from optional rockspec metadata.
+
+        Args:
+            parsed_data: Dict with parsed rockspec fields
+
+        Returns:
+            Dict with extra metadata (extensible for future fields)
+        """
+        extra_data = {}
+
+        rockspec_format = parsed_data.get('rockspec_format')
+        if rockspec_format:
+            extra_data['rockspec_format'] = rockspec_format
+
+        platforms = parsed_data.get('supported_platforms')
+        if platforms:
+            extra_data['supported_platforms'] = platforms
+
+        # Future fields can be added here
+        # e.g., build_backend, build_requires, etc.
+
+        return extra_data
+
+    @classmethod
+    def _create_purl_string(cls, package_name, package_version):
+        """
+        Create a PURL string for a luarocks package.
+
+        Args:
+            package_name: Name of the package
+            package_version: Optional version string (without operators)
+
+        Returns:
+            PURL string (e.g., "pkg:luarocks/luasocket" or "pkg:luarocks/luasocket@3.1.3")
+
+        Raises:
+            ValueError if package_name is empty
+        """
+        if not package_name:
+            raise ValueError('Package name is required for PURL creation')
+
+        purl = PackageURL(
+            type=cls.default_package_type,
+            name=package_name,
+            version=package_version
+        )
+        return purl.to_string()
+
+
 class ParseError:

From d106e926238007abaea48bbc740a7cc0c104732c Mon Sep 17 00:00:00 2001
From: Prashanna Dahal
Date: Fri, 13 Feb 2026 13:43:27 +0530
Subject: [PATCH 3/9] Add unit tests for RockspecHandler

* Test parser with real Kong rockspec file
* Test handler integration with patterns

Signed-off-by: Prashanna Dahal
---
 .../packagedcode/data/rockspec/test.rockspec  |  60 ++
 .../packagedcode/data/rockspec/test1.rockspec | 530 ++++++++++++++++++
 .../packagedcode/data/rockspec/test2.rockspec | 135 +++++
 .../packagedcode/data/rockspec/test3.rockspec |  55 ++
 .../packagedcode/data/rockspec/test4.rockspec |  32 ++
 tests/packagedcode/test_rockspec.py           | 322 +++++++++++
 6 files changed, 1134 insertions(+)
 create mode 100644 tests/packagedcode/data/rockspec/test.rockspec
 create mode 100644 tests/packagedcode/data/rockspec/test1.rockspec
 create mode 100644 tests/packagedcode/data/rockspec/test2.rockspec
 create mode 100644 tests/packagedcode/data/rockspec/test3.rockspec
 create mode 100644 tests/packagedcode/data/rockspec/test4.rockspec
 create mode 100644 tests/packagedcode/test_rockspec.py

diff --git a/tests/packagedcode/data/rockspec/test.rockspec b/tests/packagedcode/data/rockspec/test.rockspec
new file mode 100644
index 0000000000..b4fc2ef98b
--- /dev/null
+++ b/tests/packagedcode/data/rockspec/test.rockspec
@@ -0,0 +1,60 @@
+package = "lua-cjson"
+version = "2.1.0.16-1"
+
+source = {
+    url = "git+https://github.com/openresty/lua-cjson",
+    tag = "2.1.0.16",
+}
+
+description = {
+    summary = "A fast JSON encoding/parsing module",
+    detailed = [[
+        The Lua CJSON module provides JSON support for Lua. It features:
+        - Fast, standards compliant encoding/parsing routines
+        - Full support for JSON with UTF-8, including decoding surrogate pairs
+        - Optional run-time support for common exceptions to the JSON specification
+          (infinity, NaN,..)
+        - No dependencies on other libraries
+    ]],
+    homepage = "http://www.kyne.com.au/~mark/software/lua-cjson.php",
+    license = "MIT"
+}
+
+dependencies = {
+    "lua >= 5.1"
+}
+
+build = {
+    type = "builtin",
+    modules = {
+        cjson = {
+            sources = { "lua_cjson.c", "strbuf.c", "fpconv.c" },
+            defines = {
+-- LuaRocks does not support platform specific configuration for Solaris.
+-- Uncomment the line below on Solaris platforms if required.
+-- "USE_INTERNAL_ISINF" + } + }, + ["cjson.safe"] = { + sources = { "lua_cjson.c", "strbuf.c", "fpconv.c" } + } + }, + install = { + lua = { + ["cjson.util"] = "lua/cjson/util.lua" + }, + bin = { + json2lua = "lua/json2lua.lua", + lua2json = "lua/lua2json.lua" + } + }, + -- Override default build options (per platform) + platforms = { + win32 = { modules = { cjson = { defines = { + "DISABLE_INVALID_NUMBERS", "USE_INTERNAL_ISINF" + } } } } + }, + copy_directories = { "tests" } +} + +-- vi:ai et sw=4 ts=4: \ No newline at end of file diff --git a/tests/packagedcode/data/rockspec/test1.rockspec b/tests/packagedcode/data/rockspec/test1.rockspec new file mode 100644 index 0000000000..8eef1f1a7a --- /dev/null +++ b/tests/packagedcode/data/rockspec/test1.rockspec @@ -0,0 +1,530 @@ +package = "kong" +version = "3.3.0-0" +rockspec_format = "3.0" +supported_platforms = {"linux", "macosx"} +source = { + url = "git+https://github.com/Kong/kong.git", + tag = "3.3.0" +} +description = { + summary = "Kong is a scalable and customizable API Management Layer built on top of Nginx.", + homepage = "https://konghq.com", + license = "Apache 2.0" +} +dependencies = { + "inspect == 3.1.3", + "luasec == 1.3.1", + "luasocket == 3.0-rc1", + "penlight == 1.13.1", + "lua-resty-http == 0.17.1", + "lua-resty-jit-uuid == 0.0.7", + "lua-ffi-zlib == 0.5", + "multipart == 0.5.9", + "version == 1.0.1", + "kong-lapis == 1.8.3.1", + "lua-cassandra == 1.5.2", + "pgmoon == 1.16.0", + "luatz == 0.4", + "lua_system_constants == 0.1.4", + "lyaml == 6.2.8", + "luasyslog == 2.0.1", + "lua_pack == 2.0.0", + "binaryheap >= 0.4", + "luaxxhash >= 1.0", + "lua-protobuf == 0.5.0", + "lua-resty-healthcheck == 1.6.2", + "lua-resty-mlcache == 2.6.0", + "lua-messagepack == 0.5.2", + "lua-resty-openssl == 0.8.20", + "lua-resty-counter == 0.2.1", + "lua-resty-ipmatcher == 0.6.1", + "lua-resty-acme == 0.11.0", + "lua-resty-session == 4.0.3", + "lua-resty-timer-ng == 0.2.5", + "lpeg == 1.0.2", +} +build = { + type = "builtin", + modules = { + ["kong"] = "kong/init.lua", + ["kong.meta"] = "kong/meta.lua", + ["kong.cache"] = "kong/cache/init.lua", + ["kong.cache.warmup"] = "kong/cache/warmup.lua", + ["kong.cache.marshall"] = "kong/cache/marshall.lua", + ["kong.global"] = "kong/global.lua", + ["kong.router"] = "kong/router/init.lua", + ["kong.router.traditional"] = "kong/router/traditional.lua", + ["kong.router.compat"] = "kong/router/compat.lua", + ["kong.router.expressions"] = "kong/router/expressions.lua", + ["kong.router.atc"] = "kong/router/atc.lua", + ["kong.router.utils"] = "kong/router/utils.lua", + ["kong.reports"] = "kong/reports.lua", + ["kong.constants"] = "kong/constants.lua", + ["kong.concurrency"] = "kong/concurrency.lua", + ["kong.deprecation"] = "kong/deprecation.lua", + ["kong.globalpatches"] = "kong/globalpatches.lua", + ["kong.error_handlers"] = "kong/error_handlers.lua", + ["kong.hooks"] = "kong/hooks.lua", + + ["kong.conf_loader"] = "kong/conf_loader/init.lua", + ["kong.conf_loader.listeners"] = "kong/conf_loader/listeners.lua", + + ["kong.clustering"] = "kong/clustering/init.lua", + ["kong.clustering.data_plane"] = "kong/clustering/data_plane.lua", + ["kong.clustering.control_plane"] = "kong/clustering/control_plane.lua", + ["kong.clustering.utils"] = "kong/clustering/utils.lua", + ["kong.clustering.events"] = "kong/clustering/events.lua", + ["kong.clustering.compat"] = "kong/clustering/compat/init.lua", + ["kong.clustering.compat.version"] = "kong/clustering/compat/version.lua", + ["kong.clustering.compat.removed_fields"] = 
"kong/clustering/compat/removed_fields.lua", + ["kong.clustering.compat.checkers"] = "kong/clustering/compat/checkers.lua", + ["kong.clustering.config_helper"] = "kong/clustering/config_helper.lua", + ["kong.clustering.tls"] = "kong/clustering/tls.lua", + + ["kong.cluster_events"] = "kong/cluster_events/init.lua", + ["kong.cluster_events.strategies.cassandra"] = "kong/cluster_events/strategies/cassandra.lua", + ["kong.cluster_events.strategies.postgres"] = "kong/cluster_events/strategies/postgres.lua", + ["kong.cluster_events.strategies.off"] = "kong/cluster_events/strategies/off.lua", + + ["kong.templates.nginx"] = "kong/templates/nginx.lua", + ["kong.templates.nginx_kong"] = "kong/templates/nginx_kong.lua", + ["kong.templates.nginx_kong_stream"] = "kong/templates/nginx_kong_stream.lua", + ["kong.templates.kong_defaults"] = "kong/templates/kong_defaults.lua", + ["kong.templates.kong_yml"] = "kong/templates/kong_yml.lua", + + ["kong.resty.dns.client"] = "kong/resty/dns/client.lua", + ["kong.resty.dns.utils"] = "kong/resty/dns/utils.lua", + ["kong.resty.ctx"] = "kong/resty/ctx.lua", + + ["kong.cmd"] = "kong/cmd/init.lua", + ["kong.cmd.roar"] = "kong/cmd/roar.lua", + ["kong.cmd.stop"] = "kong/cmd/stop.lua", + ["kong.cmd.quit"] = "kong/cmd/quit.lua", + ["kong.cmd.start"] = "kong/cmd/start.lua", + ["kong.cmd.check"] = "kong/cmd/check.lua", + ["kong.cmd.config"] = "kong/cmd/config.lua", + ["kong.cmd.reload"] = "kong/cmd/reload.lua", + ["kong.cmd.restart"] = "kong/cmd/restart.lua", + ["kong.cmd.prepare"] = "kong/cmd/prepare.lua", + ["kong.cmd.migrations"] = "kong/cmd/migrations.lua", + ["kong.cmd.health"] = "kong/cmd/health.lua", + ["kong.cmd.vault"] = "kong/cmd/vault.lua", + ["kong.cmd.version"] = "kong/cmd/version.lua", + ["kong.cmd.hybrid"] = "kong/cmd/hybrid.lua", + ["kong.cmd.utils.log"] = "kong/cmd/utils/log.lua", + ["kong.cmd.utils.kill"] = "kong/cmd/utils/kill.lua", + ["kong.cmd.utils.env"] = "kong/cmd/utils/env.lua", + ["kong.cmd.utils.migrations"] = "kong/cmd/utils/migrations.lua", + ["kong.cmd.utils.tty"] = "kong/cmd/utils/tty.lua", + ["kong.cmd.utils.nginx_signals"] = "kong/cmd/utils/nginx_signals.lua", + ["kong.cmd.utils.prefix_handler"] = "kong/cmd/utils/prefix_handler.lua", + ["kong.cmd.utils.process_secrets"] = "kong/cmd/utils/process_secrets.lua", + + ["kong.api"] = "kong/api/init.lua", + ["kong.api.api_helpers"] = "kong/api/api_helpers.lua", + ["kong.api.arguments"] = "kong/api/arguments.lua", + ["kong.api.endpoints"] = "kong/api/endpoints.lua", + ["kong.api.routes.kong"] = "kong/api/routes/kong.lua", + ["kong.api.routes.health"] = "kong/api/routes/health.lua", + ["kong.api.routes.config"] = "kong/api/routes/config.lua", + ["kong.api.routes.consumers"] = "kong/api/routes/consumers.lua", + ["kong.api.routes.plugins"] = "kong/api/routes/plugins.lua", + ["kong.api.routes.cache"] = "kong/api/routes/cache.lua", + ["kong.api.routes.upstreams"] = "kong/api/routes/upstreams.lua", + ["kong.api.routes.targets"] = "kong/api/routes/targets.lua", + ["kong.api.routes.certificates"] = "kong/api/routes/certificates.lua", + ["kong.api.routes.snis"] = "kong/api/routes/snis.lua", + ["kong.api.routes.tags"] = "kong/api/routes/tags.lua", + ["kong.api.routes.clustering"] = "kong/api/routes/clustering.lua", + ["kong.api.routes.debug"] = "kong/api/routes/debug.lua", + + ["kong.status"] = "kong/status/init.lua", + ["kong.status.ready"] = "kong/status/ready.lua", + + ["kong.tools.dns"] = "kong/tools/dns.lua", + ["kong.tools.grpc"] = "kong/tools/grpc.lua", + ["kong.tools.utils"] = "kong/tools/utils.lua", 
+ ["kong.tools.timestamp"] = "kong/tools/timestamp.lua", + ["kong.tools.stream_api"] = "kong/tools/stream_api.lua", + ["kong.tools.queue"] = "kong/tools/queue.lua", + ["kong.tools.queue_schema"] = "kong/tools/queue_schema.lua", + ["kong.tools.sandbox"] = "kong/tools/sandbox.lua", + ["kong.tools.uri"] = "kong/tools/uri.lua", + ["kong.tools.kong-lua-sandbox"] = "kong/tools/kong-lua-sandbox.lua", + ["kong.tools.protobuf"] = "kong/tools/protobuf.lua", + ["kong.tools.mime_type"] = "kong/tools/mime_type.lua", + + ["kong.runloop.handler"] = "kong/runloop/handler.lua", + ["kong.runloop.events"] = "kong/runloop/events.lua", + ["kong.runloop.certificate"] = "kong/runloop/certificate.lua", + ["kong.runloop.plugins_iterator"] = "kong/runloop/plugins_iterator.lua", + ["kong.runloop.balancer"] = "kong/runloop/balancer/init.lua", + ["kong.runloop.balancer.balancers"] = "kong/runloop/balancer/balancers.lua", + ["kong.runloop.balancer.consistent_hashing"] = "kong/runloop/balancer/consistent_hashing.lua", + ["kong.runloop.balancer.healthcheckers"] = "kong/runloop/balancer/healthcheckers.lua", + ["kong.runloop.balancer.least_connections"] = "kong/runloop/balancer/least_connections.lua", + ["kong.runloop.balancer.latency"] = "kong/runloop/balancer/latency.lua", + ["kong.runloop.balancer.round_robin"] = "kong/runloop/balancer/round_robin.lua", + ["kong.runloop.balancer.targets"] = "kong/runloop/balancer/targets.lua", + ["kong.runloop.balancer.upstreams"] = "kong/runloop/balancer/upstreams.lua", + ["kong.runloop.plugin_servers"] = "kong/runloop/plugin_servers/init.lua", + ["kong.runloop.plugin_servers.process"] = "kong/runloop/plugin_servers/process.lua", + ["kong.runloop.plugin_servers.mp_rpc"] = "kong/runloop/plugin_servers/mp_rpc.lua", + ["kong.runloop.plugin_servers.pb_rpc"] = "kong/runloop/plugin_servers/pb_rpc.lua", + + ["kong.workspaces"] = "kong/workspaces/init.lua", + + ["kong.db"] = "kong/db/init.lua", + ["kong.db.errors"] = "kong/db/errors.lua", + ["kong.db.iteration"] = "kong/db/iteration.lua", + ["kong.db.dao"] = "kong/db/dao/init.lua", + ["kong.db.dao.certificates"] = "kong/db/dao/certificates.lua", + ["kong.db.dao.snis"] = "kong/db/dao/snis.lua", + ["kong.db.dao.targets"] = "kong/db/dao/targets.lua", + ["kong.db.dao.plugins"] = "kong/db/dao/plugins.lua", + ["kong.db.dao.tags"] = "kong/db/dao/tags.lua", + ["kong.db.dao.vaults"] = "kong/db/dao/vaults.lua", + ["kong.db.dao.workspaces"] = "kong/db/dao/workspaces.lua", + ["kong.db.declarative"] = "kong/db/declarative/init.lua", + ["kong.db.declarative.marshaller"] = "kong/db/declarative/marshaller.lua", + ["kong.db.declarative.export"] = "kong/db/declarative/export.lua", + ["kong.db.declarative.import"] = "kong/db/declarative/import.lua", + ["kong.db.schema"] = "kong/db/schema/init.lua", + ["kong.db.dao.keys"] = "kong/db/dao/keys.lua", + ["kong.db.dao.key_sets"] = "kong/db/dao/key_sets.lua", + ["kong.db.schema.entities.keys"] = "kong/db/schema/entities/keys.lua", + ["kong.db.schema.entities.key_sets"] = "kong/db/schema/entities/key_sets.lua", + ["kong.db.schema.entities.consumers"] = "kong/db/schema/entities/consumers.lua", + ["kong.db.schema.entities.routes"] = "kong/db/schema/entities/routes.lua", + ["kong.db.schema.entities.routes_subschemas"] = "kong/db/schema/entities/routes_subschemas.lua", + ["kong.db.schema.entities.services"] = "kong/db/schema/entities/services.lua", + ["kong.db.schema.entities.certificates"] = "kong/db/schema/entities/certificates.lua", + ["kong.db.schema.entities.snis"] = "kong/db/schema/entities/snis.lua", + 
["kong.db.schema.entities.upstreams"] = "kong/db/schema/entities/upstreams.lua", + ["kong.db.schema.entities.targets"] = "kong/db/schema/entities/targets.lua", + ["kong.db.schema.entities.plugins"] = "kong/db/schema/entities/plugins.lua", + ["kong.db.schema.entities.tags"] = "kong/db/schema/entities/tags.lua", + ["kong.db.schema.entities.ca_certificates"] = "kong/db/schema/entities/ca_certificates.lua", + ["kong.db.schema.entities.vaults"] = "kong/db/schema/entities/vaults.lua", + ["kong.db.schema.entities.workspaces"] = "kong/db/schema/entities/workspaces.lua", + ["kong.db.schema.entities.clustering_data_planes"] = "kong/db/schema/entities/clustering_data_planes.lua", + ["kong.db.schema.entities.parameters"] = "kong/db/schema/entities/parameters.lua", + ["kong.db.schema.others.migrations"] = "kong/db/schema/others/migrations.lua", + ["kong.db.schema.others.declarative_config"] = "kong/db/schema/others/declarative_config.lua", + ["kong.db.schema.entity"] = "kong/db/schema/entity.lua", + ["kong.db.schema.metaschema"] = "kong/db/schema/metaschema.lua", + ["kong.db.schema.typedefs"] = "kong/db/schema/typedefs.lua", + ["kong.db.schema.plugin_loader"] = "kong/db/schema/plugin_loader.lua", + ["kong.db.schema.vault_loader"] = "kong/db/schema/vault_loader.lua", + ["kong.db.schema.topological_sort"] = "kong/db/schema/topological_sort.lua", + ["kong.db.strategies"] = "kong/db/strategies/init.lua", + ["kong.db.strategies.connector"] = "kong/db/strategies/connector.lua", + ["kong.db.strategies.cassandra"] = "kong/db/strategies/cassandra/init.lua", + ["kong.db.strategies.cassandra.connector"] = "kong/db/strategies/cassandra/connector.lua", + ["kong.db.strategies.cassandra.tags"] = "kong/db/strategies/cassandra/tags.lua", + ["kong.db.strategies.postgres"] = "kong/db/strategies/postgres/init.lua", + ["kong.db.strategies.postgres.connector"] = "kong/db/strategies/postgres/connector.lua", + ["kong.db.strategies.postgres.tags"] = "kong/db/strategies/postgres/tags.lua", + ["kong.db.strategies.off"] = "kong/db/strategies/off/init.lua", + ["kong.db.strategies.off.connector"] = "kong/db/strategies/off/connector.lua", + ["kong.db.strategies.off.tags"] = "kong/db/strategies/off/tags.lua", + + ["kong.db.migrations.state"] = "kong/db/migrations/state.lua", + ["kong.db.migrations.subsystems"] = "kong/db/migrations/subsystems.lua", + ["kong.db.migrations.core"] = "kong/db/migrations/core/init.lua", + ["kong.db.migrations.core.000_base"] = "kong/db/migrations/core/000_base.lua", + ["kong.db.migrations.core.003_100_to_110"] = "kong/db/migrations/core/003_100_to_110.lua", + ["kong.db.migrations.core.004_110_to_120"] = "kong/db/migrations/core/004_110_to_120.lua", + ["kong.db.migrations.core.005_120_to_130"] = "kong/db/migrations/core/005_120_to_130.lua", + ["kong.db.migrations.core.006_130_to_140"] = "kong/db/migrations/core/006_130_to_140.lua", + ["kong.db.migrations.core.007_140_to_150"] = "kong/db/migrations/core/007_140_to_150.lua", + ["kong.db.migrations.core.008_150_to_200"] = "kong/db/migrations/core/008_150_to_200.lua", + ["kong.db.migrations.core.009_200_to_210"] = "kong/db/migrations/core/009_200_to_210.lua", + ["kong.db.migrations.core.010_210_to_211"] = "kong/db/migrations/core/010_210_to_211.lua", + ["kong.db.migrations.core.011_212_to_213"] = "kong/db/migrations/core/011_212_to_213.lua", + ["kong.db.migrations.core.012_213_to_220"] = "kong/db/migrations/core/012_213_to_220.lua", + ["kong.db.migrations.core.013_220_to_230"] = "kong/db/migrations/core/013_220_to_230.lua", + 
["kong.db.migrations.core.014_230_to_270"] = "kong/db/migrations/core/014_230_to_270.lua", + ["kong.db.migrations.core.015_270_to_280"] = "kong/db/migrations/core/015_270_to_280.lua", + ["kong.db.migrations.core.016_280_to_300"] = "kong/db/migrations/core/016_280_to_300.lua", + ["kong.db.migrations.core.017_300_to_310"] = "kong/db/migrations/core/017_300_to_310.lua", + ["kong.db.migrations.core.018_310_to_320"] = "kong/db/migrations/core/018_310_to_320.lua", + ["kong.db.migrations.core.019_320_to_330"] = "kong/db/migrations/core/019_320_to_330.lua", + ["kong.db.migrations.operations.200_to_210"] = "kong/db/migrations/operations/200_to_210.lua", + ["kong.db.migrations.operations.210_to_211"] = "kong/db/migrations/operations/210_to_211.lua", + ["kong.db.migrations.operations.212_to_213"] = "kong/db/migrations/operations/212_to_213.lua", + ["kong.db.migrations.operations.280_to_300"] = "kong/db/migrations/operations/280_to_300.lua", + ["kong.db.migrations.migrate_path_280_300"] = "kong/db/migrations/migrate_path_280_300.lua", + ["kong.db.declarative.migrations"] = "kong/db/declarative/migrations/init.lua", + ["kong.db.declarative.migrations.route_path"] = "kong/db/declarative/migrations/route_path.lua", + + ["kong.pdk"] = "kong/pdk/init.lua", + ["kong.pdk.private.checks"] = "kong/pdk/private/checks.lua", + ["kong.pdk.private.phases"] = "kong/pdk/private/phases.lua", + ["kong.pdk.private.node"] = "kong/pdk/private/node.lua", + ["kong.pdk.client"] = "kong/pdk/client.lua", + ["kong.pdk.client.tls"] = "kong/pdk/client/tls.lua", + ["kong.pdk.ctx"] = "kong/pdk/ctx.lua", + ["kong.pdk.ip"] = "kong/pdk/ip.lua", + ["kong.pdk.log"] = "kong/pdk/log.lua", + ["kong.pdk.service"] = "kong/pdk/service.lua", + ["kong.pdk.service.request"] = "kong/pdk/service/request.lua", + ["kong.pdk.service.response"] = "kong/pdk/service/response.lua", + ["kong.pdk.router"] = "kong/pdk/router.lua", + ["kong.pdk.request"] = "kong/pdk/request.lua", + ["kong.pdk.response"] = "kong/pdk/response.lua", + ["kong.pdk.table"] = "kong/pdk/table.lua", + ["kong.pdk.node"] = "kong/pdk/node.lua", + ["kong.pdk.nginx"] = "kong/pdk/nginx.lua", + ["kong.pdk.cluster"] = "kong/pdk/cluster.lua", + ["kong.pdk.vault"] = "kong/pdk/vault.lua", + ["kong.pdk.tracing"] = "kong/pdk/tracing.lua", + ["kong.pdk.plugin"] = "kong/pdk/plugin.lua", + + ["kong.plugins.basic-auth.migrations"] = "kong/plugins/basic-auth/migrations/init.lua", + ["kong.plugins.basic-auth.migrations.000_base_basic_auth"] = "kong/plugins/basic-auth/migrations/000_base_basic_auth.lua", + ["kong.plugins.basic-auth.migrations.002_130_to_140"] = "kong/plugins/basic-auth/migrations/002_130_to_140.lua", + ["kong.plugins.basic-auth.migrations.003_200_to_210"] = "kong/plugins/basic-auth/migrations/003_200_to_210.lua", + ["kong.plugins.basic-auth.crypto"] = "kong/plugins/basic-auth/crypto.lua", + ["kong.plugins.basic-auth.handler"] = "kong/plugins/basic-auth/handler.lua", + ["kong.plugins.basic-auth.access"] = "kong/plugins/basic-auth/access.lua", + ["kong.plugins.basic-auth.schema"] = "kong/plugins/basic-auth/schema.lua", + ["kong.plugins.basic-auth.daos"] = "kong/plugins/basic-auth/daos.lua", + + ["kong.plugins.key-auth.migrations"] = "kong/plugins/key-auth/migrations/init.lua", + ["kong.plugins.key-auth.migrations.000_base_key_auth"] = "kong/plugins/key-auth/migrations/000_base_key_auth.lua", + ["kong.plugins.key-auth.migrations.002_130_to_140"] = "kong/plugins/key-auth/migrations/002_130_to_140.lua", + ["kong.plugins.key-auth.migrations.003_200_to_210"] = 
"kong/plugins/key-auth/migrations/003_200_to_210.lua", + ["kong.plugins.key-auth.migrations.004_320_to_330"] = "kong/plugins/key-auth/migrations/004_320_to_330.lua", + ["kong.plugins.key-auth.handler"] = "kong/plugins/key-auth/handler.lua", + ["kong.plugins.key-auth.schema"] = "kong/plugins/key-auth/schema.lua", + ["kong.plugins.key-auth.daos"] = "kong/plugins/key-auth/daos.lua", + + ["kong.plugins.oauth2.migrations"] = "kong/plugins/oauth2/migrations/init.lua", + ["kong.plugins.oauth2.migrations.000_base_oauth2"] = "kong/plugins/oauth2/migrations/000_base_oauth2.lua", + ["kong.plugins.oauth2.migrations.003_130_to_140"] = "kong/plugins/oauth2/migrations/003_130_to_140.lua", + ["kong.plugins.oauth2.migrations.004_200_to_210"] = "kong/plugins/oauth2/migrations/004_200_to_210.lua", + ["kong.plugins.oauth2.migrations.005_210_to_211"] = "kong/plugins/oauth2/migrations/005_210_to_211.lua", + ["kong.plugins.oauth2.migrations.006_320_to_330"] = "kong/plugins/oauth2/migrations/006_320_to_330.lua", + ["kong.plugins.oauth2.migrations.007_320_to_330"] = "kong/plugins/oauth2/migrations/007_320_to_330.lua", + ["kong.plugins.oauth2.handler"] = "kong/plugins/oauth2/handler.lua", + ["kong.plugins.oauth2.secret"] = "kong/plugins/oauth2/secret.lua", + ["kong.plugins.oauth2.access"] = "kong/plugins/oauth2/access.lua", + ["kong.plugins.oauth2.schema"] = "kong/plugins/oauth2/schema.lua", + ["kong.plugins.oauth2.daos"] = "kong/plugins/oauth2/daos.lua", + ["kong.plugins.oauth2.daos.oauth2_tokens"] = "kong/plugins/oauth2/daos/oauth2_tokens.lua", + + ["kong.plugins.tcp-log.handler"] = "kong/plugins/tcp-log/handler.lua", + ["kong.plugins.tcp-log.schema"] = "kong/plugins/tcp-log/schema.lua", + + ["kong.plugins.udp-log.handler"] = "kong/plugins/udp-log/handler.lua", + ["kong.plugins.udp-log.schema"] = "kong/plugins/udp-log/schema.lua", + + ["kong.plugins.http-log.handler"] = "kong/plugins/http-log/handler.lua", + ["kong.plugins.http-log.schema"] = "kong/plugins/http-log/schema.lua", + ["kong.plugins.http-log.migrations"] = "kong/plugins/http-log/migrations/init.lua", + ["kong.plugins.http-log.migrations.001_280_to_300"] = "kong/plugins/http-log/migrations/001_280_to_300.lua", + + ["kong.plugins.file-log.handler"] = "kong/plugins/file-log/handler.lua", + ["kong.plugins.file-log.schema"] = "kong/plugins/file-log/schema.lua", + + ["kong.plugins.rate-limiting.migrations"] = "kong/plugins/rate-limiting/migrations/init.lua", + ["kong.plugins.rate-limiting.migrations.000_base_rate_limiting"] = "kong/plugins/rate-limiting/migrations/000_base_rate_limiting.lua", + ["kong.plugins.rate-limiting.migrations.003_10_to_112"] = "kong/plugins/rate-limiting/migrations/003_10_to_112.lua", + ["kong.plugins.rate-limiting.migrations.004_200_to_210"] = "kong/plugins/rate-limiting/migrations/004_200_to_210.lua", + ["kong.plugins.rate-limiting.migrations.005_320_to_330"] = "kong/plugins/rate-limiting/migrations/005_320_to_330.lua", + ["kong.plugins.rate-limiting.expiration"] = "kong/plugins/rate-limiting/expiration.lua", + ["kong.plugins.rate-limiting.handler"] = "kong/plugins/rate-limiting/handler.lua", + ["kong.plugins.rate-limiting.schema"] = "kong/plugins/rate-limiting/schema.lua", + ["kong.plugins.rate-limiting.daos"] = "kong/plugins/rate-limiting/daos.lua", + ["kong.plugins.rate-limiting.policies"] = "kong/plugins/rate-limiting/policies/init.lua", + ["kong.plugins.rate-limiting.policies.cluster"] = "kong/plugins/rate-limiting/policies/cluster.lua", + + ["kong.plugins.response-ratelimiting.migrations"] = 
"kong/plugins/response-ratelimiting/migrations/init.lua", + ["kong.plugins.response-ratelimiting.migrations.000_base_response_rate_limiting"] = "kong/plugins/response-ratelimiting/migrations/000_base_response_rate_limiting.lua", + ["kong.plugins.response-ratelimiting.handler"] = "kong/plugins/response-ratelimiting/handler.lua", + ["kong.plugins.response-ratelimiting.access"] = "kong/plugins/response-ratelimiting/access.lua", + ["kong.plugins.response-ratelimiting.header_filter"] = "kong/plugins/response-ratelimiting/header_filter.lua", + ["kong.plugins.response-ratelimiting.log"] = "kong/plugins/response-ratelimiting/log.lua", + ["kong.plugins.response-ratelimiting.schema"] = "kong/plugins/response-ratelimiting/schema.lua", + ["kong.plugins.response-ratelimiting.policies"] = "kong/plugins/response-ratelimiting/policies/init.lua", + ["kong.plugins.response-ratelimiting.policies.cluster"] = "kong/plugins/response-ratelimiting/policies/cluster.lua", + + ["kong.plugins.request-size-limiting.handler"] = "kong/plugins/request-size-limiting/handler.lua", + ["kong.plugins.request-size-limiting.schema"] = "kong/plugins/request-size-limiting/schema.lua", + + ["kong.plugins.response-transformer.handler"] = "kong/plugins/response-transformer/handler.lua", + ["kong.plugins.response-transformer.body_transformer"] = "kong/plugins/response-transformer/body_transformer.lua", + ["kong.plugins.response-transformer.header_transformer"] = "kong/plugins/response-transformer/header_transformer.lua", + ["kong.plugins.response-transformer.schema"] = "kong/plugins/response-transformer/schema.lua", + + ["kong.plugins.cors.handler"] = "kong/plugins/cors/handler.lua", + ["kong.plugins.cors.schema"] = "kong/plugins/cors/schema.lua", + + ["kong.plugins.ip-restriction.handler"] = "kong/plugins/ip-restriction/handler.lua", + ["kong.plugins.ip-restriction.schema"] = "kong/plugins/ip-restriction/schema.lua", + ["kong.plugins.ip-restriction.migrations"] = "kong/plugins/ip-restriction/migrations/init.lua", + ["kong.plugins.ip-restriction.migrations.001_200_to_210"] = "kong/plugins/ip-restriction/migrations/001_200_to_210.lua", + + ["kong.plugins.acl.migrations"] = "kong/plugins/acl/migrations/init.lua", + ["kong.plugins.acl.migrations.000_base_acl"] = "kong/plugins/acl/migrations/000_base_acl.lua", + ["kong.plugins.acl.migrations.002_130_to_140"] = "kong/plugins/acl/migrations/002_130_to_140.lua", + ["kong.plugins.acl.migrations.003_200_to_210"] = "kong/plugins/acl/migrations/003_200_to_210.lua", + ["kong.plugins.acl.migrations.004_212_to_213"] = "kong/plugins/acl/migrations/004_212_to_213.lua", + ["kong.plugins.acl.handler"] = "kong/plugins/acl/handler.lua", + ["kong.plugins.acl.schema"] = "kong/plugins/acl/schema.lua", + ["kong.plugins.acl.daos"] = "kong/plugins/acl/daos.lua", + ["kong.plugins.acl.groups"] = "kong/plugins/acl/groups.lua", + ["kong.plugins.acl.acls"] = "kong/plugins/acl/acls.lua", + ["kong.plugins.acl.api"] = "kong/plugins/acl/api.lua", + + ["kong.plugins.correlation-id.handler"] = "kong/plugins/correlation-id/handler.lua", + ["kong.plugins.correlation-id.schema"] = "kong/plugins/correlation-id/schema.lua", + + ["kong.plugins.jwt.migrations"] = "kong/plugins/jwt/migrations/init.lua", + ["kong.plugins.jwt.migrations.000_base_jwt"] = "kong/plugins/jwt/migrations/000_base_jwt.lua", + ["kong.plugins.jwt.migrations.002_130_to_140"] = "kong/plugins/jwt/migrations/002_130_to_140.lua", + ["kong.plugins.jwt.migrations.003_200_to_210"] = "kong/plugins/jwt/migrations/003_200_to_210.lua", + ["kong.plugins.jwt.handler"] 
= "kong/plugins/jwt/handler.lua", + ["kong.plugins.jwt.schema"] = "kong/plugins/jwt/schema.lua", + ["kong.plugins.jwt.daos"] = "kong/plugins/jwt/daos.lua", + ["kong.plugins.jwt.jwt_parser"] = "kong/plugins/jwt/jwt_parser.lua", + + ["kong.plugins.hmac-auth.migrations"] = "kong/plugins/hmac-auth/migrations/init.lua", + ["kong.plugins.hmac-auth.migrations.000_base_hmac_auth"] = "kong/plugins/hmac-auth/migrations/000_base_hmac_auth.lua", + ["kong.plugins.hmac-auth.migrations.002_130_to_140"] = "kong/plugins/hmac-auth/migrations/002_130_to_140.lua", + ["kong.plugins.hmac-auth.migrations.003_200_to_210"] = "kong/plugins/hmac-auth/migrations/003_200_to_210.lua", + ["kong.plugins.hmac-auth.handler"] = "kong/plugins/hmac-auth/handler.lua", + ["kong.plugins.hmac-auth.access"] = "kong/plugins/hmac-auth/access.lua", + ["kong.plugins.hmac-auth.schema"] = "kong/plugins/hmac-auth/schema.lua", + ["kong.plugins.hmac-auth.daos"] = "kong/plugins/hmac-auth/daos.lua", + + ["kong.plugins.ldap-auth.handler"] = "kong/plugins/ldap-auth/handler.lua", + ["kong.plugins.ldap-auth.access"] = "kong/plugins/ldap-auth/access.lua", + ["kong.plugins.ldap-auth.schema"] = "kong/plugins/ldap-auth/schema.lua", + ["kong.plugins.ldap-auth.ldap"] = "kong/plugins/ldap-auth/ldap.lua", + ["kong.plugins.ldap-auth.asn1"] = "kong/plugins/ldap-auth/asn1.lua", + + ["kong.plugins.syslog.handler"] = "kong/plugins/syslog/handler.lua", + ["kong.plugins.syslog.schema"] = "kong/plugins/syslog/schema.lua", + + ["kong.plugins.loggly.handler"] = "kong/plugins/loggly/handler.lua", + ["kong.plugins.loggly.schema"] = "kong/plugins/loggly/schema.lua", + + ["kong.plugins.datadog.handler"] = "kong/plugins/datadog/handler.lua", + ["kong.plugins.datadog.schema"] = "kong/plugins/datadog/schema.lua", + ["kong.plugins.datadog.statsd_logger"] = "kong/plugins/datadog/statsd_logger.lua", + + ["kong.plugins.statsd.constants"] = "kong/plugins/statsd/constants.lua", + ["kong.plugins.statsd.handler"] = "kong/plugins/statsd/handler.lua", + ["kong.plugins.statsd.log"] = "kong/plugins/statsd/log.lua", + ["kong.plugins.statsd.schema"] = "kong/plugins/statsd/schema.lua", + ["kong.plugins.statsd.statsd_logger"] = "kong/plugins/statsd/statsd_logger.lua", + + ["kong.plugins.bot-detection.handler"] = "kong/plugins/bot-detection/handler.lua", + ["kong.plugins.bot-detection.schema"] = "kong/plugins/bot-detection/schema.lua", + ["kong.plugins.bot-detection.rules"] = "kong/plugins/bot-detection/rules.lua", + ["kong.plugins.bot-detection.migrations"] = "kong/plugins/bot-detection/migrations/init.lua", + ["kong.plugins.bot-detection.migrations.001_200_to_210"] = "kong/plugins/bot-detection/migrations/001_200_to_210.lua", + + ["kong.plugins.request-termination.handler"] = "kong/plugins/request-termination/handler.lua", + ["kong.plugins.request-termination.schema"] = "kong/plugins/request-termination/schema.lua", + + ["kong.plugins.aws-lambda.aws-serializer"] = "kong/plugins/aws-lambda/aws-serializer.lua", + ["kong.plugins.aws-lambda.handler"] = "kong/plugins/aws-lambda/handler.lua", + ["kong.plugins.aws-lambda.iam-ec2-credentials"] = "kong/plugins/aws-lambda/iam-ec2-credentials.lua", + ["kong.plugins.aws-lambda.iam-ecs-credentials"] = "kong/plugins/aws-lambda/iam-ecs-credentials.lua", + ["kong.plugins.aws-lambda.iam-sts-credentials"] = "kong/plugins/aws-lambda/iam-sts-credentials.lua", + ["kong.plugins.aws-lambda.schema"] = "kong/plugins/aws-lambda/schema.lua", + ["kong.plugins.aws-lambda.v4"] = "kong/plugins/aws-lambda/v4.lua", + ["kong.plugins.aws-lambda.request-util"] = 
"kong/plugins/aws-lambda/request-util.lua", + + ["kong.plugins.grpc-gateway.deco"] = "kong/plugins/grpc-gateway/deco.lua", + ["kong.plugins.grpc-gateway.handler"] = "kong/plugins/grpc-gateway/handler.lua", + ["kong.plugins.grpc-gateway.schema"] = "kong/plugins/grpc-gateway/schema.lua", + + ["kong.plugins.acme.api"] = "kong/plugins/acme/api.lua", + ["kong.plugins.acme.client"] = "kong/plugins/acme/client.lua", + ["kong.plugins.acme.daos"] = "kong/plugins/acme/daos.lua", + ["kong.plugins.acme.handler"] = "kong/plugins/acme/handler.lua", + ["kong.plugins.acme.migrations.000_base_acme"] = "kong/plugins/acme/migrations/000_base_acme.lua", + ["kong.plugins.acme.migrations.001_280_to_300"] = "kong/plugins/acme/migrations/001_280_to_300.lua", + ["kong.plugins.acme.migrations.002_320_to_330"] = "kong/plugins/acme/migrations/002_320_to_330.lua", + ["kong.plugins.acme.migrations"] = "kong/plugins/acme/migrations/init.lua", + ["kong.plugins.acme.schema"] = "kong/plugins/acme/schema.lua", + ["kong.plugins.acme.storage.kong"] = "kong/plugins/acme/storage/kong.lua", + ["kong.plugins.acme.reserved_words"] = "kong/plugins/acme/reserved_words.lua", + + ["kong.plugins.prometheus.api"] = "kong/plugins/prometheus/api.lua", + ["kong.plugins.prometheus.status_api"] = "kong/plugins/prometheus/status_api.lua", + ["kong.plugins.prometheus.exporter"] = "kong/plugins/prometheus/exporter.lua", + ["kong.plugins.prometheus.handler"] = "kong/plugins/prometheus/handler.lua", + ["kong.plugins.prometheus.prometheus"] = "kong/plugins/prometheus/prometheus.lua", + ["kong.plugins.prometheus.serve"] = "kong/plugins/prometheus/serve.lua", + ["kong.plugins.prometheus.schema"] = "kong/plugins/prometheus/schema.lua", + + ["kong.plugins.session.handler"] = "kong/plugins/session/handler.lua", + ["kong.plugins.session.schema"] = "kong/plugins/session/schema.lua", + ["kong.plugins.session.access"] = "kong/plugins/session/access.lua", + ["kong.plugins.session.header_filter"] = "kong/plugins/session/header_filter.lua", + ["kong.plugins.session.session"] = "kong/plugins/session/session.lua", + ["kong.plugins.session.daos"] = "kong/plugins/session/daos.lua", + ["kong.plugins.session.storage.kong"] = "kong/plugins/session/storage/kong.lua", + ["kong.plugins.session.migrations.000_base_session"] = "kong/plugins/session/migrations/000_base_session.lua", + ["kong.plugins.session.migrations.001_add_ttl_index"] = "kong/plugins/session/migrations/001_add_ttl_index.lua", + ["kong.plugins.session.migrations.002_320_to_330"] = "kong/plugins/session/migrations/002_320_to_330.lua", + ["kong.plugins.session.migrations"] = "kong/plugins/session/migrations/init.lua", + + ["kong.plugins.proxy-cache.handler"] = "kong/plugins/proxy-cache/handler.lua", + ["kong.plugins.proxy-cache.cache_key"] = "kong/plugins/proxy-cache/cache_key.lua", + ["kong.plugins.proxy-cache.schema"] = "kong/plugins/proxy-cache/schema.lua", + ["kong.plugins.proxy-cache.api"] = "kong/plugins/proxy-cache/api.lua", + ["kong.plugins.proxy-cache.strategies"] = "kong/plugins/proxy-cache/strategies/init.lua", + ["kong.plugins.proxy-cache.strategies.memory"] = "kong/plugins/proxy-cache/strategies/memory.lua", + + ["kong.plugins.grpc-web.deco"] = "kong/plugins/grpc-web/deco.lua", + ["kong.plugins.grpc-web.handler"] = "kong/plugins/grpc-web/handler.lua", + ["kong.plugins.grpc-web.schema"] = "kong/plugins/grpc-web/schema.lua", + + ["kong.plugins.pre-function._handler"] = "kong/plugins/pre-function/_handler.lua", + ["kong.plugins.pre-function._schema"] = "kong/plugins/pre-function/_schema.lua", + 
["kong.plugins.pre-function.migrations._001_280_to_300"] = "kong/plugins/pre-function/migrations/_001_280_to_300.lua", + + ["kong.plugins.pre-function.handler"] = "kong/plugins/pre-function/handler.lua", + ["kong.plugins.pre-function.schema"] = "kong/plugins/pre-function/schema.lua", + ["kong.plugins.pre-function.migrations"] = "kong/plugins/pre-function/migrations/init.lua", + ["kong.plugins.pre-function.migrations.001_280_to_300"] = "kong/plugins/pre-function/migrations/001_280_to_300.lua", + + ["kong.plugins.post-function.handler"] = "kong/plugins/post-function/handler.lua", + ["kong.plugins.post-function.schema"] = "kong/plugins/post-function/schema.lua", + ["kong.plugins.post-function.migrations"] = "kong/plugins/post-function/migrations/init.lua", + ["kong.plugins.post-function.migrations.001_280_to_300"] = "kong/plugins/post-function/migrations/001_280_to_300.lua", + + ["kong.plugins.zipkin.handler"] = "kong/plugins/zipkin/handler.lua", + ["kong.plugins.zipkin.reporter"] = "kong/plugins/zipkin/reporter.lua", + ["kong.plugins.zipkin.span"] = "kong/plugins/zipkin/span.lua", + ["kong.plugins.zipkin.schema"] = "kong/plugins/zipkin/schema.lua", + ["kong.plugins.zipkin.request_tags"] = "kong/plugins/zipkin/request_tags.lua", + + ["kong.plugins.request-transformer.migrations.cassandra"] = "kong/plugins/request-transformer/migrations/cassandra.lua", + ["kong.plugins.request-transformer.migrations.postgres"] = "kong/plugins/request-transformer/migrations/postgres.lua", + ["kong.plugins.request-transformer.migrations.common"] = "kong/plugins/request-transformer/migrations/common.lua", + ["kong.plugins.request-transformer.handler"] = "kong/plugins/request-transformer/handler.lua", + ["kong.plugins.request-transformer.access"] = "kong/plugins/request-transformer/access.lua", + ["kong.plugins.request-transformer.schema"] = "kong/plugins/request-transformer/schema.lua", + + ["kong.plugins.azure-functions.handler"] = "kong/plugins/azure-functions/handler.lua", + ["kong.plugins.azure-functions.schema"] = "kong/plugins/azure-functions/schema.lua", + + ["kong.plugins.opentelemetry.handler"] = "kong/plugins/opentelemetry/handler.lua", + ["kong.plugins.opentelemetry.schema"] = "kong/plugins/opentelemetry/schema.lua", + ["kong.plugins.opentelemetry.proto"] = "kong/plugins/opentelemetry/proto.lua", + ["kong.plugins.opentelemetry.otlp"] = "kong/plugins/opentelemetry/otlp.lua", + + ["kong.vaults.env"] = "kong/vaults/env/init.lua", + ["kong.vaults.env.schema"] = "kong/vaults/env/schema.lua", + + ["kong.tracing.instrumentation"] = "kong/tracing/instrumentation.lua", + ["kong.tracing.propagation"] = "kong/tracing/propagation.lua", + } +} diff --git a/tests/packagedcode/data/rockspec/test2.rockspec b/tests/packagedcode/data/rockspec/test2.rockspec new file mode 100644 index 0000000000..66826a3efd --- /dev/null +++ b/tests/packagedcode/data/rockspec/test2.rockspec @@ -0,0 +1,135 @@ +package = "LuaSocket" +version = "scm-3" +source = { + url = "git+https://github.com/lunarmodules/luasocket.git", + branch = "master" +} +description = { + summary = "Network support for the Lua language", + detailed = [[ + LuaSocket is a Lua extension library composed of two parts: a set of C + modules that provide support for the TCP and UDP transport layers, and a + set of Lua modules that provide functions commonly needed by applications + that deal with the Internet. 
+ ]], + homepage = "https://github.com/lunarmodules/luasocket", + license = "MIT" +} +dependencies = { + "lua >= 5.1" +} + +local function make_plat(plat) + local defines = { + unix = { + "LUASOCKET_DEBUG" + }, + macosx = { + "LUASOCKET_DEBUG", + "UNIX_HAS_SUN_LEN" + }, + win32 = { + "LUASOCKET_DEBUG", + "NDEBUG" + }, + mingw32 = { + "LUASOCKET_DEBUG", + -- "LUASOCKET_INET_PTON", + "WINVER=0x0501" + } + } + local modules = { + ["socket.core"] = { + sources = { + "src/luasocket.c" + , "src/timeout.c" + , "src/buffer.c" + , "src/io.c" + , "src/auxiliar.c" + , "src/options.c" + , "src/inet.c" + , "src/except.c" + , "src/select.c" + , "src/tcp.c" + , "src/udp.c" + , "src/compat.c" }, + defines = defines[plat], + incdir = "/src" + }, + ["mime.core"] = { + sources = { "src/mime.c", "src/compat.c" }, + defines = defines[plat], + incdir = "/src" + }, + ["socket.http"] = "src/http.lua", + ["socket.url"] = "src/url.lua", + ["socket.tp"] = "src/tp.lua", + ["socket.ftp"] = "src/ftp.lua", + ["socket.headers"] = "src/headers.lua", + ["socket.smtp"] = "src/smtp.lua", + ltn12 = "src/ltn12.lua", + socket = "src/socket.lua", + mbox = "src/mbox.lua", + mime = "src/mime.lua" + } + if plat == "unix" + or plat == "macosx" + or plat == "haiku" + then + modules["socket.core"].sources[#modules["socket.core"].sources+1] = "src/usocket.c" + if plat == "haiku" then + modules["socket.core"].libraries = {"network"} + end + modules["socket.unix"] = { + sources = { + "src/buffer.c" + , "src/compat.c" + , "src/auxiliar.c" + , "src/options.c" + , "src/timeout.c" + , "src/io.c" + , "src/usocket.c" + , "src/unix.c" + , "src/unixdgram.c" + , "src/unixstream.c" }, + defines = defines[plat], + incdir = "/src" + } + modules["socket.serial"] = { + sources = { + "src/buffer.c" + , "src/compat.c" + , "src/auxiliar.c" + , "src/options.c" + , "src/timeout.c" + , "src/io.c" + , "src/usocket.c" + , "src/serial.c" }, + defines = defines[plat], + incdir = "/src" + } + end + if plat == "win32" + or plat == "mingw32" + then + modules["socket.core"].sources[#modules["socket.core"].sources+1] = "src/wsocket.c" + modules["socket.core"].libraries = { "ws2_32" } + modules["socket.core"].libdirs = {} + end + return { modules = modules } +end + +build = { + type = "builtin", + platforms = { + unix = make_plat("unix"), + macosx = make_plat("macosx"), + haiku = make_plat("haiku"), + win32 = make_plat("win32"), + mingw32 = make_plat("mingw32") + }, + copy_directories = { + "docs" + , "samples" + , "test" } +} \ No newline at end of file diff --git a/tests/packagedcode/data/rockspec/test3.rockspec b/tests/packagedcode/data/rockspec/test3.rockspec new file mode 100644 index 0000000000..8fde441812 --- /dev/null +++ b/tests/packagedcode/data/rockspec/test3.rockspec @@ -0,0 +1,55 @@ +rockspec_format = "3.0" +package = "vdsl" +version = "0.1.0-1" + +source = { + url = "git+https://github.com/ynishi/vdsl.git", + tag = "v0.1.0", +} + +description = { + summary = "Visual DSL for ComfyUI", + detailed = [[ + vdsl transforms semantic scene composition into ComfyUI node graphs. + Pure Lua. Zero dependencies. + Images become portable project files through PNG-embedded recipes. 
+ ]], + homepage = "https://github.com/ynishi/vdsl", + license = "MIT", + labels = { "comfyui", "dsl", "image-generation", "stable-diffusion" }, +} + +dependencies = { + "lua >= 5.1", +} + +build = { + type = "builtin", + modules = { + ["vdsl"] = "lua/vdsl/init.lua", + ["vdsl.entity"] = "lua/vdsl/entity.lua", + ["vdsl.trait"] = "lua/vdsl/trait.lua", + ["vdsl.subject"] = "lua/vdsl/subject.lua", + ["vdsl.weight"] = "lua/vdsl/weight.lua", + ["vdsl.world"] = "lua/vdsl/world.lua", + ["vdsl.cast"] = "lua/vdsl/cast.lua", + ["vdsl.stage"] = "lua/vdsl/stage.lua", + ["vdsl.post"] = "lua/vdsl/post.lua", + ["vdsl.catalog"] = "lua/vdsl/catalog.lua", + ["vdsl.theme"] = "lua/vdsl/theme.lua", + ["vdsl.compiler"] = "lua/vdsl/compiler.lua", + ["vdsl.decode"] = "lua/vdsl/decode.lua", + ["vdsl.graph"] = "lua/vdsl/graph.lua", + ["vdsl.json"] = "lua/vdsl/json.lua", + ["vdsl.matcher"] = "lua/vdsl/matcher.lua", + ["vdsl.png"] = "lua/vdsl/png.lua", + ["vdsl.recipe"] = "lua/vdsl/recipe.lua", + ["vdsl.registry"] = "lua/vdsl/registry.lua", + ["vdsl.transport"] = "lua/vdsl/transport/init.lua", + ["vdsl.transport.curl"] = "lua/vdsl/transport/curl.lua", + ["vdsl.themes.cinema"] = "lua/vdsl/themes/cinema.lua", + ["vdsl.themes.anime"] = "lua/vdsl/themes/anime.lua", + ["vdsl.themes.architecture"] = "lua/vdsl/themes/architecture.lua", + }, + copy_directories = { "examples", "tests" }, +} \ No newline at end of file diff --git a/tests/packagedcode/data/rockspec/test4.rockspec b/tests/packagedcode/data/rockspec/test4.rockspec new file mode 100644 index 0000000000..f49f6f9095 --- /dev/null +++ b/tests/packagedcode/data/rockspec/test4.rockspec @@ -0,0 +1,32 @@ +---@diagnostic disable: lowercase-global + +local _MODREV, _SPECREV = "scm", "-1" +rockspec_format = "3.0" +version = _MODREV .. _SPECREV + +local user = "S1M0N38" +package = "claude.nvim" + +description = { + summary = "A simple plugin to integrate Claude Code in Neovim", + detailed = [[ +claude.nvim is a simple plugin to integrate Claude Code in Neovim. + ]], + labels = { "neovim", "plugin", "lua", "claude", "ai" }, + homepage = "https://github.com/" .. user .. "/" .. package, + license = "MIT", +} + +dependencies = { + "lua >= 5.1", +} + + +source = { + url = "git://github.com/" .. user .. "/" .. package, +} + +build = { + type = "builtin", + copy_directories = { "plugin", "doc", "scripts" }, +} \ No newline at end of file diff --git a/tests/packagedcode/test_rockspec.py b/tests/packagedcode/test_rockspec.py new file mode 100644 index 0000000000..fef3daee0f --- /dev/null +++ b/tests/packagedcode/test_rockspec.py @@ -0,0 +1,322 @@ +# +# Copyright (c) nexB Inc. and others. All rights reserved. +# ScanCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/nexB/scancode-toolkit for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. 
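# Illustrative sketch, not part of this patch: the test4.rockspec fixture above
# builds `version`, `source.url` and `homepage` from Lua string concatenation
# (`..`) over local variables such as _MODREV and _SPECREV. One plausible way to
# resolve such expressions with luaparser is to fold concatenation nodes
# recursively against an environment of already-resolved local strings. The
# String/Name attribute names (s, id) match those used in rockspec.py; the
# Concat node with left/right operands is an assumption about the luaparser AST.

def _fold_concat(node, env):
    """Resolve a luaparser expression node to a plain string, or return None
    when it cannot be resolved statically (e.g. a call or unknown variable)."""
    kind = type(node).__name__
    if kind == 'String':
        value = node.s
        return value.decode('utf-8') if isinstance(value, bytes) else str(value)
    if kind == 'Name':
        # env maps local variable names (e.g. '_MODREV') to resolved strings
        return env.get(node.id)
    if kind == 'Concat':
        left = _fold_concat(node.left, env)
        right = _fold_concat(node.right, env)
        if left is not None and right is not None:
            return left + right
    return None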
+ + +import json +import os +import tempfile + +from packagedcode import rockspec +from packages_test_utils import PackageTester +from scancode.cli_test_utils import run_scan_click + + +class TestRockspecParser(PackageTester): + """Tests for RockspecParser following ScanCode's testing patterns.""" + + test_data_dir = os.path.join(os.path.dirname(__file__), 'data') + + def test_mandatory_fields_test_1(self): + """Test extraction of mandatory fields from test1 rockspec.""" + test_file = self.get_test_loc('rockspec/test1.rockspec') + parser = rockspec.RockspecParser(test_file) + data = parser.parse() + + assert data['package'] == 'kong' + assert data['version'] == '3.3.0-0' + assert data['vcs_url'] == 'git+https://github.com/Kong/kong.git' + assert len(parser.errors) == 0 + + def test_optional_fields_test_1(self): + """Test extraction of optional fields from test1 rockspec.""" + test_file = self.get_test_loc('rockspec/test1.rockspec') + parser = rockspec.RockspecParser(test_file) + data = parser.parse() + + assert data['description'] is not None + assert 'Kong is a scalable' in data['description'] + assert data['license'] == 'Apache 2.0' + assert data['homepage_url'] == 'https://konghq.com' + + def test_metadata_fields_test_1(self): + """Test extraction of metadata fields from test1 rockspec.""" + test_file = self.get_test_loc('rockspec/test1.rockspec') + parser = rockspec.RockspecParser(test_file) + data = parser.parse() + + assert data['rockspec_format'] == '3.0' + assert isinstance(data['supported_platforms'], list) + assert len(data['supported_platforms']) == 2 + assert 'linux' in data['supported_platforms'] + assert 'macosx' in data['supported_platforms'] + + def test_dependencies_test_1(self): + """Test extraction of dependencies from test1 rockspec. + + Dependencies are now returned as parsed dicts {name, version_spec, raw} + instead of raw strings. + """ + test_file = self.get_test_loc('rockspec/test1.rockspec') + parser = rockspec.RockspecParser(test_file) + data = parser.parse() + + assert isinstance(data['dependencies'], list) + assert len(data['dependencies']) == 30 + + # Dependencies are now parsed dicts + dep_names = [dep['name'] for dep in data['dependencies']] + dep_raws = [dep['raw'] for dep in data['dependencies']] + + assert 'inspect' in dep_names + assert 'luasec' in dep_names + assert 'inspect == 3.1.3' in dep_raws + assert 'luasec == 1.3.1' in dep_raws + + def test_concatenation_variables_test4(self): + """Test extraction with variable concatenation in test4.rockspec.""" + test_file = self.get_test_loc('rockspec/test4.rockspec') + parser = rockspec.RockspecParser(test_file) + data = parser.parse() + + # version = _MODREV .. 
_SPECREV should resolve to "scm-1" + assert data['package'] == 'claude.nvim' + assert data['version'] == 'scm-1' + + # URL concatenation should resolve all variables + assert 'github.com' in data['vcs_url'] + assert 'S1M0N38' in data['vcs_url'] + assert 'claude.nvim' in data['vcs_url'] + + # Homepage concatenation + assert 'github.com' in data['homepage_url'] + assert 'S1M0N38' in data['homepage_url'] + assert 'claude.nvim' in data['homepage_url'] + + assert data['license'] == 'MIT' + assert len(parser.errors) == 0 + + def test_error_missing_package(self): + """Test error handling when package field is missing.""" + rockspec_content = 'version = "1.0.0"\nsource = { url = "git://test" }' + + with tempfile.NamedTemporaryFile(mode='w', suffix='.rockspec', delete=False) as f: + f.write(rockspec_content) + f.flush() + temp_file = f.name + + try: + parser = rockspec.RockspecParser(temp_file) + data = parser.parse() + + assert data['package'] is None + assert any(err.field == 'package' for err in parser.errors) + finally: + os.unlink(temp_file) + + def test_error_missing_version(self): + """Test error handling when version field is missing.""" + rockspec_content = 'package = "test"\nsource = { url = "git://test" }' + + with tempfile.NamedTemporaryFile(mode='w', suffix='.rockspec', delete=False) as f: + f.write(rockspec_content) + f.flush() + temp_file = f.name + + try: + parser = rockspec.RockspecParser(temp_file) + data = parser.parse() + + assert data['version'] is None + assert any(err.field == 'version' for err in parser.errors) + finally: + os.unlink(temp_file) + + def test_error_missing_source_url(self): + """Test error handling when source.url is missing.""" + rockspec_content = 'package = "test"\nversion = "1.0"\nsource = { tag = "v1" }' + + with tempfile.NamedTemporaryFile(mode='w', suffix='.rockspec', delete=False) as f: + f.write(rockspec_content) + f.flush() + temp_file = f.name + + try: + parser = rockspec.RockspecParser(temp_file) + data = parser.parse() + + assert data['vcs_url'] is None + assert any(err.field == 'source.url' for err in parser.errors) + finally: + os.unlink(temp_file) + + def test_error_file_not_found(self): + """Test error handling when rockspec file does not exist.""" + parser = rockspec.RockspecParser('/nonexistent/rockspec/path.rockspec') + data = parser.parse() + + assert data == {} + assert len(parser.errors) > 0 + + +class TestRockspecHandlerIntegration(PackageTester): + """Test RockspecHandler integration with ScanCode.""" + + test_data_dir = os.path.join(os.path.dirname(__file__), 'data') + + def test_is_datafile_rockspec(self): + """Test that is_datafile recognizes .rockspec files.""" + test_file = self.get_test_loc('rockspec/test1.rockspec') + assert rockspec.RockspecHandler.is_datafile(test_file) + + def test_is_datafile_non_rockspec(self): + """Test that is_datafile rejects non-.rockspec files.""" + test_file = self.get_test_loc('rockspec/test1.rockspec') + # Just verify the handler has the correct path_patterns + assert '*.rockspec' in rockspec.RockspecHandler.path_patterns + + def test_handler_is_registered(self): + """Test that RockspecHandler is registered in the system.""" + from packagedcode import APPLICATION_PACKAGE_DATAFILE_HANDLERS + handlers = [h for h in APPLICATION_PACKAGE_DATAFILE_HANDLERS + if h.datasource_id == 'luarocks_rockspec'] + assert len(handlers) == 1, f"Expected 1 RockspecHandler, found {len(handlers)}" + assert handlers[0] == rockspec.RockspecHandler + + def test_handler_in_datasource_registry(self): + """Test that handler is 
in the HANDLER_BY_DATASOURCE_ID registry.""" + from packagedcode import HANDLER_BY_DATASOURCE_ID + handler = HANDLER_BY_DATASOURCE_ID.get('luarocks_rockspec') + assert handler is not None + assert handler == rockspec.RockspecHandler + + def test_handler_attributes(self): + """Test that handler has required attributes.""" + assert rockspec.RockspecHandler.datasource_id == 'luarocks_rockspec' + assert rockspec.RockspecHandler.path_patterns == ('*.rockspec',) + assert rockspec.RockspecHandler.default_package_type == 'luarocks' + assert rockspec.RockspecHandler.default_primary_language == 'Lua' + assert rockspec.RockspecHandler.description is not None + + def test_debug_is_datafile_direct(self): + """Debug test: directly check if is_datafile works for the test file.""" + test_file = self.get_test_loc('rockspec/test1.rockspec') + + # This should return True if the handler can recognize the file + is_match = rockspec.RockspecHandler.is_datafile(test_file) + assert is_match, f"is_datafile() returned False for {test_file}" + + # Also verify parse works directly + packages = list(rockspec.RockspecHandler.parse(test_file)) + assert len(packages) > 0, f"parse() returned no packages for {test_file}" + + def test_end2end_rockspec_scan_with_package_flag(self): + """End-to-end test: scan a rockspec file with --package flag.""" + test_file = self.get_test_loc('rockspec/test1.rockspec') + result_file = self.get_temp_file('results.json') + run_scan_click(['--package', test_file, '--json', result_file]) + + # Parse results + with open(result_file) as f: + results = json.load(f) + + # Check that packages were found + packages = results.get('packages', []) + assert len(packages) > 0, f"No packages found in scan results. Got: {json.dumps(results, indent=2)}" + + # Verify package data + pkg = packages[0] + assert pkg['name'] == 'kong' + assert pkg['version'] == '3.3.0-0' + assert pkg['type'] == 'luarocks' + assert 'luarocks_rockspec' in pkg.get('datasource_ids', []) + + # Check dependencies from the top-level dependencies array + # (not in the Package object itself) + package_uid = pkg.get('package_uid') + dependencies = results.get('dependencies', []) + pkg_dependencies = [dep for dep in dependencies if dep.get('for_package_uid') == package_uid] + assert len(pkg_dependencies) == 30, f"Expected 30 dependencies, got {len(pkg_dependencies)}" + + + + +class TestDependencyParsing(PackageTester): + """Test parse_dependency helper method.""" + + test_data_dir = os.path.join(os.path.dirname(__file__), 'data') + + def test_dependency_with_equals_operator(self): + """Test parsing dependency with == operator.""" + test_file = self.get_test_loc('rockspec/test1.rockspec') + parser = rockspec.RockspecParser(test_file) + result = parser.parse_dependency('inspect == 3.1.3') + + assert result is not None + assert result['name'] == 'inspect' + assert result['version_spec'] == '== 3.1.3' + + def test_dependency_with_gte_operator(self): + """Test parsing dependency with >= operator.""" + test_file = self.get_test_loc('rockspec/test1.rockspec') + parser = rockspec.RockspecParser(test_file) + result = parser.parse_dependency('binaryheap >= 0.4') + + assert result is not None + assert result['name'] == 'binaryheap' + assert result['version_spec'] == '>= 0.4' + + def test_dependency_without_version(self): + """Test parsing dependency without version spec.""" + test_file = self.get_test_loc('rockspec/test1.rockspec') + parser = rockspec.RockspecParser(test_file) + result = parser.parse_dependency('somedep') + + assert result is not 
None + assert result['name'] == 'somedep' + assert result['version_spec'] is None + + def test_dependency_empty_string(self): + """Test parsing empty dependency string.""" + test_file = self.get_test_loc('rockspec/test1.rockspec') + parser = rockspec.RockspecParser(test_file) + result = parser.parse_dependency('') + + assert result is None + def test_handler_parse_returns_package_data(self): + """Test that RockspecHandler.parse returns proper PackageData objects.""" + test_file = self.get_test_loc('rockspec/test1.rockspec') + packages = list(rockspec.RockspecHandler.parse(test_file)) + + assert len(packages) == 1 + pkg = packages[0] + + assert isinstance(pkg, rockspec.models.PackageData) + assert pkg.name == 'kong' + assert pkg.version == '3.3.0-0' + assert pkg.type == 'luarocks' + assert pkg.datasource_id == 'luarocks_rockspec' + assert pkg.vcs_url == 'git+https://github.com/Kong/kong.git' + assert len(pkg.dependencies) == 30 + + def test_handler_creates_dependent_packages(self): + """Test that dependencies are converted to DependentPackage objects.""" + test_file = self.get_test_loc('rockspec/test1.rockspec') + packages = list(rockspec.RockspecHandler.parse(test_file)) + + pkg = packages[0] + assert len(pkg.dependencies) > 0 + + for dep in pkg.dependencies: + assert isinstance(dep, rockspec.models.DependentPackage) + assert dep.scope == 'dependencies' + assert dep.is_runtime is True + + +if __name__ == '__main__': + import pytest + pytest.main([__file__, '-v']) From dc4fc9931656004c673d47b56db9adcf5ff82738 Mon Sep 17 00:00:00 2001 From: Prashanna Dahal Date: Fri, 13 Feb 2026 13:44:32 +0530 Subject: [PATCH 4/9] Integrate RockspecHandler into __init__ Register handler in APPLICATION_PACKAGE_DATAFILE_HANDLERS to enable automatic .rockspec file detection. Signed-off-by: Prashanna Dahal --- src/packagedcode/__init__.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/packagedcode/__init__.py b/src/packagedcode/__init__.py index d3c48b6e25..67320a3057 100644 --- a/src/packagedcode/__init__.py +++ b/src/packagedcode/__init__.py @@ -35,6 +35,7 @@ from packagedcode import pubspec from packagedcode import pypi from packagedcode import readme +from packagedcode import rockspec from packagedcode import rpm from packagedcode import rubygems from packagedcode import swift @@ -202,6 +203,8 @@ rubygems.GemspecInExtractedGemHandler, rubygems.GemspecHandler, + rockspec.RockspecHandler, + swift.SwiftManifestJsonHandler, swift.SwiftPackageResolvedHandler, swift.SwiftShowDependenciesDepLockHandler, From 99cd99f01ca15123571eb225b56131cdec0b715a Mon Sep 17 00:00:00 2001 From: Prashanna Dahal Date: Fri, 13 Feb 2026 13:54:52 +0530 Subject: [PATCH 5/9] Fix dead documentation links * Update contributing-docs link to new structure * Ensure both documentation links work correctly * Point to getting-started/contribute path Signed-off-by: Prashanna Dahal --- CONTRIBUTING.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index f96994ea8d..7449f90183 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -85,7 +85,7 @@ template. Your help and contribution make ScanCode docs better, we love hearing The ScanCode documentation is hosted at `scancode-toolkit.readthedocs.io `_. -If you want to contribute to Scancode Documentation, you'll find `this guide here https://scancode-toolkit.readthedocs.io/en/latest/getting-started/contribute/contributing-docs.html`_ helpful. 
+If you want to contribute to Scancode Documentation, you'll find `this guide here `_ helpful. Development =========== @@ -123,7 +123,7 @@ To set up ScanCode for local development: git checkout -b name-of-your-bugfix-or-feature -4. Check out the Contributing to Code Development `documentation `_, as it contains more in-depth guide for contributing code and documentation. +4. Check out the Contributing to Code Development `documentation `_, as it contains more in-depth guide for contributing code and documentation. 5. To configure your local environment for development, locate to the main directory of the local repository, and run the configure script. From 3b359de564e911e5649408727c672cd9cd1b2c74 Mon Sep 17 00:00:00 2001 From: Prashanna Dahal Date: Fri, 13 Feb 2026 22:16:50 +0530 Subject: [PATCH 6/9] Fix luaparser version specifier #3526 Change luaparser from == to match project's version pinning pattern: * requirements.txt: luaparser==1.4.3 (exact pinning) * setup.cfg: luaparser == 1.4.3 (exact pinning) Signed-off-by: Prashanna Dahal --- requirements.txt | 1 + setup.cfg | 1 + 2 files changed, 2 insertions(+) diff --git a/requirements.txt b/requirements.txt index 3b59481345..b713c93152 100644 --- a/requirements.txt +++ b/requirements.txt @@ -42,6 +42,7 @@ jsonstreams==0.6.0 keyring==23.7.0 license-expression==30.4.4 lxml==6.0.2 +luaparser==4.0.0 MarkupSafe==3.0.3 more-itertools==10.8.0 multiregex==2.0.3 diff --git a/setup.cfg b/setup.cfg index 7c45f388fd..0a0e1946d5 100644 --- a/setup.cfg +++ b/setup.cfg @@ -91,6 +91,7 @@ install_requires = jsonstreams >= 0.5.0 license_expression >= 30.4.4 lxml >= 5.4.0 + luaparser == 4.0.0 MarkupSafe >= 2.1.2 multiregex >= 2.0.3 normality <= 2.6.1 From 9a0cd5fd641d187700dabdbce55a190917999f3e Mon Sep 17 00:00:00 2001 From: Prashanna Dahal Date: Sat, 14 Feb 2026 17:47:31 +0530 Subject: [PATCH 7/9] Add LuaRocks rockspec package handler to plugins list. 
Signed-off-by: Prashanna Dahal --- .../data/plugin/plugins_list_linux.txt | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/tests/packagedcode/data/plugin/plugins_list_linux.txt b/tests/packagedcode/data/plugin/plugins_list_linux.txt index eb4763d6c7..0d18f15729 100755 --- a/tests/packagedcode/data/plugin/plugins_list_linux.txt +++ b/tests/packagedcode/data/plugin/plugins_list_linux.txt @@ -116,7 +116,7 @@ Package type: cargo documentation URL: https://github.com/rust-secure-code/cargo-auditable/blob/master/PARSING.md primary language: Rust description: Rust binary - path_patterns: + path_patterns: -------------------------------------------- Package type: chef datasource_id: chef_cookbook_metadata_json @@ -473,7 +473,7 @@ Package type: golang documentation URL: https://github.com/nexB/go-inspector/ primary language: Go description: Go binary - path_patterns: + path_patterns: -------------------------------------------- Package type: haxe datasource_id: haxelib_json @@ -545,6 +545,13 @@ Package type: linux-distro description: Linux OS release metadata file path_patterns: '*etc/os-release', '*usr/lib/os-release' -------------------------------------------- +Package type: luarocks + datasource_id: luarocks_rockspec + documentation URL: https://github.com/luarocks/luarocks/blob/main/docs/rockspec_format.md + primary language: Lua + description: LuaRocks rockspec file + path_patterns: '*.rockspec' +-------------------------------------------- Package type: maven datasource_id: build_gradle documentation URL: None @@ -683,7 +690,7 @@ Package type: osgi documentation URL: https://docs.oracle.com/javase/tutorial/deployment/jar/manifestindex.html primary language: Java description: Java OSGi MANIFEST.MF - path_patterns: + path_patterns: -------------------------------------------- Package type: pubspec datasource_id: pubspec_lock @@ -806,9 +813,9 @@ Package type: pypi -------------------------------------------- Package type: readme datasource_id: readme - documentation URL: + documentation URL: primary language: None - description: + description: path_patterns: '*/README.android', '*/README.chromium', '*/README.facebook', '*/README.google', '*/README.thirdparty' -------------------------------------------- Package type: rpm @@ -872,7 +879,7 @@ Package type: squashfs documentation URL: https://en.wikipedia.org/wiki/SquashFS primary language: None description: Squashfs disk image - path_patterns: + path_patterns: -------------------------------------------- Package type: swift datasource_id: swift_package_manifest_json From 4103a813824f484a074704ff047ebed85a047cfd Mon Sep 17 00:00:00 2001 From: Prashanna Dahal Date: Sat, 14 Feb 2026 19:38:20 +0530 Subject: [PATCH 8/9] Fix auto-formatter changes in .txt file - This was causing a test to fail in CI/CD Signed-off-by: Prashanna Dahal --- .../packagedcode/data/plugin/plugins_list_linux.txt | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/tests/packagedcode/data/plugin/plugins_list_linux.txt b/tests/packagedcode/data/plugin/plugins_list_linux.txt index 0d18f15729..acd9d393e2 100755 --- a/tests/packagedcode/data/plugin/plugins_list_linux.txt +++ b/tests/packagedcode/data/plugin/plugins_list_linux.txt @@ -116,7 +116,7 @@ Package type: cargo documentation URL: https://github.com/rust-secure-code/cargo-auditable/blob/master/PARSING.md primary language: Rust description: Rust binary - path_patterns: + path_patterns: -------------------------------------------- Package type: chef 
datasource_id: chef_cookbook_metadata_json @@ -473,7 +473,7 @@ Package type: golang documentation URL: https://github.com/nexB/go-inspector/ primary language: Go description: Go binary - path_patterns: + path_patterns: -------------------------------------------- Package type: haxe datasource_id: haxelib_json @@ -690,7 +690,7 @@ Package type: osgi documentation URL: https://docs.oracle.com/javase/tutorial/deployment/jar/manifestindex.html primary language: Java description: Java OSGi MANIFEST.MF - path_patterns: + path_patterns: -------------------------------------------- Package type: pubspec datasource_id: pubspec_lock @@ -813,9 +813,9 @@ Package type: pypi -------------------------------------------- Package type: readme datasource_id: readme - documentation URL: + documentation URL: primary language: None - description: + description: path_patterns: '*/README.android', '*/README.chromium', '*/README.facebook', '*/README.google', '*/README.thirdparty' -------------------------------------------- Package type: rpm @@ -879,7 +879,7 @@ Package type: squashfs documentation URL: https://en.wikipedia.org/wiki/SquashFS primary language: None description: Squashfs disk image - path_patterns: + path_patterns: -------------------------------------------- Package type: swift datasource_id: swift_package_manifest_json @@ -950,3 +950,4 @@ Package type: winexe primary language: None description: Windows Portable Executable metadata path_patterns: '*.exe', '*.dll', '*.mui', '*.mun', '*.com', '*.winmd', '*.sys', '*.tlb', '*.exe_*', '*.dll_*', '*.mui_*', '*.mun_*', '*.com_*', '*.winmd_*', '*.sys_*', '*.tlb_*', '*.ocx' + From d7b5fbd016b0286f4659a616bdc28bd0d2f66308 Mon Sep 17 00:00:00 2001 From: Prashanna Dahal Date: Sat, 14 Feb 2026 22:25:14 +0530 Subject: [PATCH 9/9] Fix auto-formatter changes in .txt file - Extra "/n" causing failure of CI/CD Signed-off-by: Prashanna Dahal --- tests/packagedcode/data/plugin/plugins_list_linux.txt | 1 - 1 file changed, 1 deletion(-) diff --git a/tests/packagedcode/data/plugin/plugins_list_linux.txt b/tests/packagedcode/data/plugin/plugins_list_linux.txt index acd9d393e2..a8e9ad0de7 100755 --- a/tests/packagedcode/data/plugin/plugins_list_linux.txt +++ b/tests/packagedcode/data/plugin/plugins_list_linux.txt @@ -950,4 +950,3 @@ Package type: winexe primary language: None description: Windows Portable Executable metadata path_patterns: '*.exe', '*.dll', '*.mui', '*.mun', '*.com', '*.winmd', '*.sys', '*.tlb', '*.exe_*', '*.dll_*', '*.mui_*', '*.mun_*', '*.com_*', '*.winmd_*', '*.sys_*', '*.tlb_*', '*.ocx' -
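The TestDependencyParsing cases in the test suite above pin down the expected shape of RockspecParser.parse_dependency(): a dict with name, version_spec and raw keys, a None version_spec when no constraint is present, and None for an empty string. The following is a minimal regex-based sketch that satisfies those cases; it illustrates the expected behaviour and is not the implementation from the earlier patch.

import re

# LuaRocks version constraints begin with an operator such as ==, >=, <=, >, <, ~> or !=.
_DEP_RE = re.compile(r'^\s*(?P<name>[A-Za-z0-9._-]+)\s*(?P<spec>[<>=~!].*)?$')

def parse_dependency(dep_string):
    """Split a LuaRocks dependency string such as 'inspect == 3.1.3' into
    {'name': 'inspect', 'version_spec': '== 3.1.3', 'raw': 'inspect == 3.1.3'}.
    Return None for an empty or unparsable string."""
    if not dep_string or not dep_string.strip():
        return None
    match = _DEP_RE.match(dep_string.strip())
    if not match:
        return None
    spec = match.group('spec')
    return {
        'name': match.group('name'),
        'version_spec': spec.strip() if spec else None,
        'raw': dep_string,
    }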