Compare commits

..

15 Commits

Author SHA1 Message Date
Mike Hunhoff
5d90b70d4b Merge branch 'master' into dependabot/pip/pytest-8.3.5 2025-05-20 12:56:35 -06:00
Moritz
8206a97b0f Merge pull request #2659 from mandiant/dependabot/npm_and_yarn/web/explorer/vite-6.3.4
build(deps-dev): bump vite from 6.2.3 to 6.3.4 in /web/explorer
2025-05-20 16:47:50 +02:00
Capa Bot
5a33b4b2a8 Sync capa rules submodule 2025-05-19 18:21:38 +00:00
Capa Bot
fcfdeec377 Sync capa rules submodule 2025-05-19 18:21:13 +00:00
Capa Bot
37a63a751c Sync capa-testfiles submodule 2025-05-19 18:12:00 +00:00
zdw@
3a9f2136bb lint: log the failed example+rule (#2661)
* lint: log the failed example+rule

* Update scripts/lint.py

Co-authored-by: Mike Hunhoff <mike.hunhoff@gmail.com>

* fix lint

---------

Co-authored-by: Mike Hunhoff <mike.hunhoff@gmail.com>
2025-05-12 15:11:22 -06:00
Capa Bot
390e2a6315 Sync capa-testfiles submodule 2025-05-12 16:17:27 +00:00
Capa Bot
6a43084915 Sync capa-testfiles submodule 2025-05-12 16:06:51 +00:00
Mike Hunhoff
a2db738066 Merge branch 'master' into dependabot/pip/pytest-8.3.5 2025-05-12 09:53:21 -06:00
dependabot[bot]
6d7ca57fa9 build(deps): bump pydantic from 2.10.1 to 2.11.4 (#2660)
Bumps [pydantic](https://github.com/pydantic/pydantic) from 2.10.1 to 2.11.4.
- [Release notes](https://github.com/pydantic/pydantic/releases)
- [Changelog](https://github.com/pydantic/pydantic/blob/main/HISTORY.md)
- [Commits](https://github.com/pydantic/pydantic/compare/v2.10.1...v2.11.4)

---
updated-dependencies:
- dependency-name: pydantic
  dependency-version: 2.11.4
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2025-05-12 09:48:46 -06:00
Ana María Martínez Gómez
d1090e8391 ci: Update Ubuntu version in actions (#2656)
ubuntu-20.04 has been deprecated causing several GH actions to fail:
https://github.com/actions/runner-images/issues/11101
2025-05-09 15:40:59 -06:00
dependabot[bot]
b07efe773b build(deps-dev): bump vite from 6.2.3 to 6.3.4 in /web/explorer
Bumps [vite](https://github.com/vitejs/vite/tree/HEAD/packages/vite) from 6.2.3 to 6.3.4.
- [Release notes](https://github.com/vitejs/vite/releases)
- [Changelog](https://github.com/vitejs/vite/blob/main/packages/vite/CHANGELOG.md)
- [Commits](https://github.com/vitejs/vite/commits/v6.3.4/packages/vite)

---
updated-dependencies:
- dependency-name: vite
  dependency-version: 6.3.4
  dependency-type: direct:development
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-04-30 18:53:28 +00:00
Mike Hunhoff
cb80620a62 Merge branch 'master' into dependabot/pip/pytest-8.3.5 2025-03-10 12:24:22 -06:00
Mike Hunhoff
ada34c0496 Merge branch 'master' into dependabot/pip/pytest-8.3.5 2025-03-04 13:40:26 -07:00
dependabot[bot]
52b4f8e0d0 build(deps): bump pytest from 8.0.0 to 8.3.5
Bumps [pytest](https://github.com/pytest-dev/pytest) from 8.0.0 to 8.3.5.
- [Release notes](https://github.com/pytest-dev/pytest/releases)
- [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst)
- [Commits](https://github.com/pytest-dev/pytest/compare/8.0.0...8.3.5)

---
updated-dependencies:
- dependency-name: pytest
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
2025-03-03 14:32:31 +00:00
11 changed files with 105 additions and 516 deletions

View File

@@ -22,12 +22,12 @@ jobs:
fail-fast: true
matrix:
include:
- os: ubuntu-20.04
- os: ubuntu-24.04
# use old linux so that the shared library versioning is more portable
artifact_name: capa
asset_name: linux
python_version: '3.10'
- os: ubuntu-20.04
- os: ubuntu-24.04
artifact_name: capa
asset_name: linux-py312
python_version: '3.12'
@@ -49,7 +49,7 @@ jobs:
uses: actions/setup-python@0a5c61591373683505ea898e09a3ea4f39ef2b9c # v5.0.0
with:
python-version: ${{ matrix.python_version }}
- if: matrix.os == 'ubuntu-20.04'
- if: matrix.os == 'ubuntu-24.04'
run: sudo apt-get install -y libyaml-dev
- name: Upgrade pip, setuptools
run: python -m pip install --upgrade pip setuptools
@@ -82,10 +82,10 @@ jobs:
matrix:
include:
# OSs not already tested above
- os: ubuntu-22.04
- os: ubuntu-24.04
artifact_name: capa
asset_name: linux
- os: ubuntu-22.04
- os: ubuntu-24.04
artifact_name: capa
asset_name: linux-py312
- os: windows-2022

View File

@@ -26,7 +26,7 @@ env:
jobs:
changelog_format:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- name: Checkout capa
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
@@ -37,7 +37,7 @@ jobs:
if [ $number != 1 ]; then exit 1; fi
code_style:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- name: Checkout capa
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
@@ -64,7 +64,7 @@ jobs:
run: pre-commit run deptry --hook-stage manual
rule_linter:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
steps:
- name: Checkout capa with submodules
uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
@@ -88,16 +88,16 @@ jobs:
strategy:
fail-fast: false
matrix:
os: [ubuntu-20.04, windows-2019, macos-13]
os: [ubuntu-24.04, windows-2019, macos-13]
# across all operating systems
python-version: ["3.10", "3.11"]
include:
# on Ubuntu run these as well
- os: ubuntu-20.04
- os: ubuntu-24.04
python-version: "3.10"
- os: ubuntu-20.04
- os: ubuntu-24.04
python-version: "3.11"
- os: ubuntu-20.04
- os: ubuntu-24.04
python-version: "3.12"
steps:
- name: Checkout capa with submodules
@@ -109,7 +109,7 @@ jobs:
with:
python-version: ${{ matrix.python-version }}
- name: Install pyyaml
if: matrix.os == 'ubuntu-20.04'
if: matrix.os == 'ubuntu-24.04'
run: sudo apt-get install -y libyaml-dev
- name: Install capa
run: |
@@ -126,7 +126,7 @@ jobs:
name: Binary Ninja tests for ${{ matrix.python-version }}
env:
BN_SERIAL: ${{ secrets.BN_SERIAL }}
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
needs: [tests]
strategy:
fail-fast: false
@@ -168,7 +168,7 @@ jobs:
ghidra-tests:
name: Ghidra tests for ${{ matrix.python-version }}
runs-on: ubuntu-20.04
runs-on: ubuntu-24.04
needs: [tests]
strategy:
fail-fast: false

View File

@@ -6,7 +6,7 @@
### Breaking Changes
### New Rules (15)
### New Rules (17)
- communication/socket/connect-socket moritz.raabe@mandiant.com joakim@intezer.com mrhafizfarhad@gmail.com
- communication/socket/udp/connect-udp-socket mrhafizfarhad@gmail.com
@@ -22,6 +22,8 @@
- nursery/disable-firewall-features-via-registry-on-windows mehunhoff@google.com
- nursery/disable-system-restore-features-via-registry-on-windows mehunhoff@google.com
- nursery/disable-windows-defender-features-via-registry-on-windows mehunhoff@google.com
- host-interaction/file-system/write/clear-file-content jakeperalta7
- host-interaction/filter/unload-minifilter-driver JakePeralta7
-
### Bug Fixes

View File

@@ -122,7 +122,7 @@ dev = [
# These dependencies are not used in production environments
# and should not conflict with other libraries/tooling.
"pre-commit==4.1.0",
"pytest==8.0.0",
"pytest==8.3.5",
"pytest-sugar==1.0.0",
"pytest-instafail==0.5.0",
"flake8==7.1.1",

View File

@@ -26,7 +26,7 @@ protobuf==6.30.1
pyasn1==0.5.1
pyasn1-modules==0.3.0
pycparser==2.22
pydantic==2.10.1
pydantic==2.11.4
# pydantic pins pydantic-core,
# but dependabot updates these separately (which is broken) and is annoying,
# so we rely on pydantic to pull in the right version of pydantic-core.

2
rules

Submodule rules updated: d64c2c91ea...61092d21b5

View File

@@ -1,490 +0,0 @@
#!/usr/bin/env python
# /// script
# requires-python = ">=3.12"
# dependencies = [
# "protobuf",
# "python-lancelot",
# "rich",
# ]
# ///
#
# TODO:
# - ignore stack cookie check
import sys
import json
import time
import logging
import argparse
import contextlib
from typing import Any
from pathlib import Path
from collections import defaultdict
from dataclasses import dataclass
import lancelot
import rich.padding
import lancelot.be2utils
import google.protobuf.message
from rich.text import Text
from rich.theme import Theme
from rich.markup import escape
from rich.console import Console
from lancelot.be2utils.binexport2_pb2 import BinExport2
logger = logging.getLogger("codemap")


@contextlib.contextmanager
def timing(msg: str):
    """Log the wall-clock duration of the wrapped statements at DEBUG level.

    msg: short label identifying the timed operation in the log line.
    """
    started = time.time()
    yield
    finished = time.time()
    logger.debug("perf: %s: %0.2fs", msg, finished - started)
class Renderer:
    """Thin wrapper over a rich ``Console`` that tracks an indentation level.

    All output funnels through :meth:`print`, which pads each renderable by
    ``indent * 2`` spaces (via ``rich.padding.Padding``) before printing.

    NOTE(review): indentation below was reconstructed from a whitespace-stripped
    capture of this file; verify nesting against the original source.
    """

    def __init__(self, console: Console):
        self.console: Console = console
        # current nesting depth; each level adds two spaces of padding
        self.indent: int = 0

    @contextlib.contextmanager
    def indenting(self):
        """Deepen the indent by one level for the duration of the with-body."""
        self.indent += 1
        try:
            yield
        finally:
            self.indent -= 1

    @staticmethod
    def markup(s: str, **kwargs) -> Text:
        """Format ``s`` with ``kwargs``, escaping string values first.

        Escaping ensures interpolated values cannot inject rich markup;
        non-string values are passed through to ``str.format`` unchanged.
        """
        escaped_args = {k: (escape(v) if isinstance(v, str) else v) for k, v in kwargs.items()}
        return Text.from_markup(s.format(**escaped_args))

    def print(self, renderable, **kwargs):
        """Print ``renderable`` at the current indent.

        When kwargs are given, ``renderable`` must be a markup format string;
        it is rendered through :meth:`markup` and re-submitted recursively.
        """
        if not kwargs:
            return self.console.print(rich.padding.Padding(renderable, (0, 0, 0, self.indent * 2)))
        assert isinstance(renderable, str)
        return self.print(self.markup(renderable, **kwargs))

    def writeln(self, s: str):
        # convenience alias of print() for plain strings
        self.print(s)

    @contextlib.contextmanager
    def section(self, name):
        """Print a heading styled with the "title" theme style, then indent the body.

        ``name`` may be a plain string or an already-built ``Text``; anything
        else raises ``ValueError``.
        """
        if isinstance(name, str):
            self.print("[title]{name}", name=name)
        elif isinstance(name, Text):
            # apply the title style beneath any styles already on the Text
            name = name.copy()
            name.stylize_before(self.console.get_style("title"))
            self.print(name)
        else:
            raise ValueError("unexpected section name")
        with self.indenting():
            yield
@dataclass
class AssemblageLocation:
    """One function-location record parsed from an Assemblage JSONL dataset."""

    name: str       # function name (used to label call-graph vertices downstream)
    file: str       # source file, possibly suffixed with " (<object>)"
    prototype: str  # function prototype text
    rva: int        # function start as a relative virtual address

    @property
    def path(self):
        """The source path with any trailing " (...)" object suffix stripped."""
        if self.file.endswith(")"):
            head, _sep, _rest = self.file.rpartition(" (")
            return head
        return self.file

    @classmethod
    def from_dict(cls, data: dict[str, Any]):
        """Construct a location from a decoded Assemblage record."""
        return cls(
            name=data["name"],
            file=data["file"],
            prototype=data["prototype"],
            rva=data["function_start"],
        )

    @staticmethod
    def from_json(doc: str):
        """Construct a location from one Assemblage JSONL line."""
        return AssemblageLocation.from_dict(json.loads(doc))
def main(argv: list[str] | None = None):
    """Render a textual map of a BinExport2 file: metadata, sections, libraries,
    and per-function callers, callees, APIs, capa matches, and strings.

    argv: CLI arguments; defaults to ``sys.argv[1:]``.

    NOTE(review): indentation below was reconstructed from a whitespace-stripped
    capture of this file; the nesting is inferred from syntax and must be
    verified against the original source.
    """
    if argv is None:
        argv = sys.argv[1:]

    parser = argparse.ArgumentParser(description="Inspect BinExport2 files")
    parser.add_argument("input_file", type=Path, help="path to input file")
    parser.add_argument("--capa", type=Path, help="path to capa JSON results file")
    parser.add_argument("--assemblage", type=Path, help="path to Assemblage JSONL file")
    parser.add_argument("-d", "--debug", action="store_true", help="enable debugging output on STDERR")
    parser.add_argument("-q", "--quiet", action="store_true", help="disable all output but errors")
    args = parser.parse_args(args=argv)

    logging.basicConfig()
    if args.quiet:
        logging.getLogger().setLevel(logging.WARNING)
    elif args.debug:
        logging.getLogger().setLevel(logging.DEBUG)
    else:
        logging.getLogger().setLevel(logging.INFO)

    theme = Theme(
        {
            "decoration": "grey54",
            "title": "yellow",
            "key": "black",
            "value": "blue",
            "default": "black",
        },
        inherit=False,
    )
    # markup/emoji are disabled on the console: all markup is routed through
    # Renderer.markup(), which escapes interpolated values.
    console = Console(theme=theme, markup=False, emoji=False)
    o = Renderer(console)

    be2: BinExport2
    buf: bytes
    try:
        # easiest way to determine if this is a BinExport2 proto is...
        # to just try to decode it.
        buf = args.input_file.read_bytes()
        with timing("loading BinExport2"):
            be2 = BinExport2()
            be2.ParseFromString(buf)
    except google.protobuf.message.DecodeError:
        # not a BinExport2 proto: treat the input as a raw binary and
        # produce a BinExport2 from it via lancelot.
        with timing("analyzing file"):
            input_file: Path = args.input_file
            buf = lancelot.get_binexport2_bytes_from_bytes(input_file.read_bytes())
        with timing("loading BinExport2"):
            be2 = BinExport2()
            be2.ParseFromString(buf)

    with timing("indexing BinExport2"):
        idx = lancelot.be2utils.BinExport2Index(be2)

    # function address -> set of capa rule names that matched within it
    matches_by_function: defaultdict[int, set[str]] = defaultdict(set)
    if args.capa:
        with timing("loading capa"):
            doc = json.loads(args.capa.read_text())

            # basic block address -> containing function address, per capa's layout
            functions_by_basic_block: dict[int, int] = {}
            for function in doc["meta"]["analysis"]["layout"]["functions"]:
                for basic_block in function["matched_basic_blocks"]:
                    functions_by_basic_block[basic_block["address"]["value"]] = function["address"]["value"]

            # absolute match address -> set of rule names
            matches_by_address: defaultdict[int, set[str]] = defaultdict(set)
            for rule_name, results in doc["rules"].items():
                for location, _ in results["matches"]:
                    if location["type"] != "absolute":
                        continue
                    address = location["value"]
                    matches_by_address[location["value"]].add(rule_name)

            for address, matches in matches_by_address.items():
                if function := functions_by_basic_block.get(address):
                    if function in idx.thunks:
                        # forward any capa for a thunk to its target
                        # since viv may not recognize the thunk as a separate function.
                        logger.debug("forwarding capa matches from thunk 0x%x to 0x%x", function, idx.thunks[function])
                        function = idx.thunks[function]
                    matches_by_function[function].update(matches)
                    for match in matches:
                        logger.info("capa: 0x%x: %s", function, match)
                else:
                    # we don't know which function this is.
                    # hopefully it's a function recognized in our BinExport analysis.
                    # *shrug*
                    #
                    # apparently viv doesn't emit function entries for thunks?
                    # or somehow our layout is messed up.
                    if address in idx.thunks:
                        # forward any capa for a thunk to its target
                        # since viv may not recognize the thunk as a separate function.
                        logger.debug("forwarding capa matches from thunk 0x%x to 0x%x", address, idx.thunks[address])
                        address = idx.thunks[address]
                        # since we found the thunk, we know this is a BinExport-recognized function.
                        # so that's nice.
                        for match in matches:
                            logger.info("capa: 0x%x: %s", address, match)
                    else:
                        logger.warning("unknown address: 0x%x: %s", address, matches)
                    matches_by_function[address].update(matches)

    # guess the base address (which BinExport2 does not track explicitly),
    # by assuming it is the lowest mapped page.
    base_address = min(map(lambda section: section.address, be2.section))
    # NOTE(review): this uses the root logger (logging.info) while the rest of the
    # function uses `logger` — presumably unintended; confirm before changing.
    logging.info("guessed base address: 0x%x", base_address)

    # virtual address (base + rva) -> Assemblage location record
    assemblage_locations_by_va: dict[int, AssemblageLocation] = {}
    if args.assemblage:
        with timing("loading assemblage"):
            with args.assemblage.open("rt", encoding="utf-8") as f:
                for line in f:
                    if not line:
                        continue
                    location = AssemblageLocation.from_json(line)
                    assemblage_locations_by_va[base_address + location.rva] = location

    # update function names for the in-memory BinExport2 using Assemblage data.
    # this won't affect the be2 on disk, because we don't serialize it back out.
    for address, location in assemblage_locations_by_va.items():
        if not location.name:
            continue
        if vertex_index := idx.vertex_index_by_address.get(address):
            # NOTE(review): chained assignment — `vertex` ends up bound to the name
            # string and is unused; only the demangled_name write matters. Confirm intent.
            vertex = be2.call_graph.vertex[vertex_index].demangled_name = location.name

    # index all the callers of each function, resolving thunks.
    # idx.callers_by_vertex_id does not resolve thunks.
    resolved_callers_by_vertex_id = defaultdict(set)
    for edge in be2.call_graph.edge:
        source_index = edge.source_vertex_index
        if lancelot.be2utils.is_thunk_vertex(be2.call_graph.vertex[source_index]):
            # we don't care about the callers that are thunks.
            continue
        if lancelot.be2utils.is_thunk_vertex(be2.call_graph.vertex[edge.target_vertex_index]):
            # resolve the thunk target so callers point at the real function.
            thunk_vertex = be2.call_graph.vertex[edge.target_vertex_index]
            thunk_address = thunk_vertex.address
            target_address = idx.thunks[thunk_address]
            target_index = idx.vertex_index_by_address[target_address]
            logger.debug(
                "call %s -(thunk)-> %s",
                idx.get_function_name_by_vertex(source_index),
                idx.get_function_name_by_vertex(target_index),
            )
        else:
            target_index = edge.target_vertex_index
            logger.debug(
                "call %s -> %s",
                idx.get_function_name_by_vertex(source_index),
                idx.get_function_name_by_vertex(target_index),
            )
        resolved_callers_by_vertex_id[target_index].add(source_index)

    t0 = time.time()

    with o.section("meta"):
        o.writeln(f"name: {be2.meta_information.executable_name}")
        o.writeln(f"sha256: {be2.meta_information.executable_id}")
        o.writeln(f"arch: {be2.meta_information.architecture_name}")
        o.writeln(f"ts: {be2.meta_information.timestamp}")

    with o.section("modules"):
        for module in be2.module:
            o.writeln(f"- {module.name}")
        if not be2.module:
            o.writeln("(none)")

    with o.section("sections"):
        for section in be2.section:
            perms = ""
            perms += "r" if section.flag_r else "-"
            perms += "w" if section.flag_w else "-"
            perms += "x" if section.flag_x else "-"
            o.writeln(f"- {hex(section.address)} {perms} {hex(section.size)}")

    with o.section("libraries"):
        for library in be2.library:
            o.writeln(
                f"- {library.name:<12s} {'(static)' if library.is_static else ''}{(' at ' + hex(library.load_address)) if library.HasField('load_address') else ''}"
            )
        if not be2.library:
            o.writeln("(none)")

    # position of each function in address order; used to render caller/callee deltas.
    vertex_order_by_address = {address: i for (i, address) in enumerate(idx.vertex_index_by_address.keys())}

    with o.section("functions"):
        last_address = None
        for _, vertex_index in idx.vertex_index_by_address.items():
            vertex = be2.call_graph.vertex[vertex_index]
            vertex_order = vertex_order_by_address[vertex.address]

            # skip imported/library and module-attributed functions.
            if vertex.HasField("library_index"):
                continue
            if vertex.HasField("module_index"):
                continue

            function_name = idx.get_function_name_by_vertex(vertex_index)

            if last_address:
                # emit a file separator when crossing into a new source file
                # (only possible when Assemblage data is loaded).
                try:
                    last_path = assemblage_locations_by_va[last_address].path
                    path = assemblage_locations_by_va[vertex.address].path
                    if last_path != path:
                        o.print(o.markup("[blue]~~~~~~~~~~~~~~~~~~~~~~~~~~~~~[/] [title]file[/] {path}\n", path=path))
                except KeyError:
                    pass
            last_address = vertex.address

            if lancelot.be2utils.is_thunk_vertex(vertex):
                # thunks get a heading only; no body details.
                with o.section(
                    o.markup(
                        "thunk [default]{function_name}[/] [decoration]@ {function_address}[/]",
                        function_name=function_name,
                        function_address=hex(vertex.address),
                    )
                ):
                    continue

            with o.section(
                o.markup(
                    "function [default]{function_name}[/] [decoration]@ {function_address}[/]",
                    function_name=function_name,
                    function_address=hex(vertex.address),
                )
            ):
                if vertex.address in idx.thunks:
                    o.writeln("")
                    continue

                # keep the xrefs separate from the calls, since they're visually hard to distinguish.
                # use local index of callers that has resolved intermediate thunks,
                # since they are sometimes stored in a physically distant location.
                for caller_index in resolved_callers_by_vertex_id.get(vertex_index, []):
                    caller_vertex = be2.call_graph.vertex[caller_index]
                    caller_order = vertex_order_by_address[caller_vertex.address]
                    caller_delta = caller_order - vertex_order
                    # NOTE(review): both branches assign "" — direction arrow glyphs
                    # were likely lost in transcription; confirm against the original.
                    if caller_delta < 0:
                        direction = ""
                    else:
                        direction = ""
                    o.print(
                        "xref: [decoration]{direction}[/] {name} [decoration]({delta:+})[/]",
                        direction=direction,
                        name=idx.get_function_name_by_vertex(caller_index),
                        delta=caller_delta,
                    )

                # summarize basic blocks / edges / instructions (and total byte size).
                if vertex.address not in idx.flow_graph_index_by_address:
                    num_basic_blocks = 0
                    num_instructions = 0
                    num_edges = 0
                    total_instruction_size = 0
                else:
                    flow_graph_index = idx.flow_graph_index_by_address[vertex.address]
                    flow_graph = be2.flow_graph[flow_graph_index]
                    num_basic_blocks = len(flow_graph.basic_block_index)
                    num_instructions = sum(
                        len(list(idx.instruction_indices(be2.basic_block[bb_idx])))
                        for bb_idx in flow_graph.basic_block_index
                    )
                    num_edges = len(flow_graph.edge)
                    total_instruction_size = 0
                    for bb_idx in flow_graph.basic_block_index:
                        basic_block = be2.basic_block[bb_idx]
                        for _, instruction, _ in idx.basic_block_instructions(basic_block):
                            total_instruction_size += len(instruction.raw_bytes)
                o.writeln(
                    f"B/E/I: {num_basic_blocks} / {num_edges} / {num_instructions} ({total_instruction_size} bytes)"
                )

                for match in matches_by_function.get(vertex.address, []):
                    o.writeln(f"capa: {match}")

                if vertex.address in idx.flow_graph_index_by_address:
                    flow_graph_index = idx.flow_graph_index_by_address[vertex.address]
                    flow_graph = be2.flow_graph[flow_graph_index]

                    # first pass: non-library callees ("calls:"), with thunks
                    # resolved to their targets; deduplicated per function.
                    seen_callees = set()
                    for basic_block_index in flow_graph.basic_block_index:
                        basic_block = be2.basic_block[basic_block_index]
                        for instruction_index, instruction, _ in idx.basic_block_instructions(basic_block):
                            if instruction.call_target:
                                for call_target_address in instruction.call_target:
                                    if call_target_address in idx.thunks:
                                        call_target_address = idx.thunks[call_target_address]
                                    call_target_index = idx.vertex_index_by_address[call_target_address]
                                    call_target_vertex = be2.call_graph.vertex[call_target_index]
                                    if call_target_vertex.HasField("library_index"):
                                        continue
                                    if call_target_vertex.address in seen_callees:
                                        continue
                                    seen_callees.add(call_target_vertex.address)
                                    call_target_order = vertex_order_by_address[call_target_address]
                                    call_target_delta = call_target_order - vertex_order
                                    call_target_name = idx.get_function_name_by_address(call_target_address)
                                    # NOTE(review): empty direction strings here too — see above.
                                    if call_target_delta < 0:
                                        direction = ""
                                    else:
                                        direction = ""
                                    o.print(
                                        "calls: [decoration]{direction}[/] {name} [decoration]({delta:+})[/]",
                                        direction=direction,
                                        name=call_target_name,
                                        delta=call_target_delta,
                                    )

                    # second pass: library callees ("api:"); thunk addresses are
                    # NOT resolved here, only direct library targets are reported.
                    for basic_block_index in flow_graph.basic_block_index:
                        basic_block = be2.basic_block[basic_block_index]
                        for instruction_index, instruction, _ in idx.basic_block_instructions(basic_block):
                            if instruction.call_target:
                                for call_target_address in instruction.call_target:
                                    call_target_index = idx.vertex_index_by_address[call_target_address]
                                    call_target_vertex = be2.call_graph.vertex[call_target_index]
                                    if not call_target_vertex.HasField("library_index"):
                                        continue
                                    if call_target_vertex.address in seen_callees:
                                        continue
                                    seen_callees.add(call_target_vertex.address)
                                    call_target_name = idx.get_function_name_by_address(call_target_address)
                                    o.print(
                                        "api: {name}",
                                        name=call_target_name,
                                    )

                    # referenced strings, deduplicated per function.
                    seen_strings = set()
                    for basic_block_index in flow_graph.basic_block_index:
                        basic_block = be2.basic_block[basic_block_index]
                        for instruction_index, instruction, _ in idx.basic_block_instructions(basic_block):
                            if instruction_index in idx.string_reference_index_by_source_instruction_index:
                                for string_reference_index in idx.string_reference_index_by_source_instruction_index[
                                    instruction_index
                                ]:
                                    string_reference = be2.string_reference[string_reference_index]
                                    string_index = string_reference.string_table_index
                                    string = be2.string_table[string_index]
                                    if string in seen_strings:
                                        continue
                                    seen_strings.add(string)
                                    o.print(
                                        'string: [decoration]"[/]{string}[decoration]"[/]',
                                        string=string.rstrip(),
                                    )

            # blank line between functions
            o.print("")

    t1 = time.time()
    logger.debug("perf: rendering BinExport2: %0.2fs", t1 - t0)


if __name__ == "__main__":
    sys.exit(main())

View File

@@ -406,6 +406,7 @@ class DoesntMatchExample(Lint):
return True
if rule.name not in capabilities:
logger.info('rule "%s" does not match for sample %s', rule.name, example_id)
return True

View File

@@ -27,7 +27,7 @@
"eslint-plugin-vue": "^9.23.0",
"jsdom": "^24.1.0",
"prettier": "^3.2.5",
"vite": "^6.2.3",
"vite": "^6.3.4",
"vite-plugin-singlefile": "^2.2.0",
"vitest": "^3.0.9"
}
@@ -3426,6 +3426,51 @@
"dev": true,
"license": "MIT"
},
"node_modules/tinyglobby": {
"version": "0.2.13",
"resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.13.tgz",
"integrity": "sha512-mEwzpUgrLySlveBwEVDMKk5B57bhLPYovRfPAXD5gA/98Opn0rCDj3GtLwFvCvH5RK9uPCExUROW5NjDwvqkxw==",
"dev": true,
"license": "MIT",
"dependencies": {
"fdir": "^6.4.4",
"picomatch": "^4.0.2"
},
"engines": {
"node": ">=12.0.0"
},
"funding": {
"url": "https://github.com/sponsors/SuperchupuDev"
}
},
"node_modules/tinyglobby/node_modules/fdir": {
"version": "6.4.4",
"resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz",
"integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==",
"dev": true,
"license": "MIT",
"peerDependencies": {
"picomatch": "^3 || ^4"
},
"peerDependenciesMeta": {
"picomatch": {
"optional": true
}
}
},
"node_modules/tinyglobby/node_modules/picomatch": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz",
"integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/sponsors/jonschlinkert"
}
},
"node_modules/tinypool": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/tinypool/-/tinypool-1.0.2.tgz",
@@ -3561,15 +3606,18 @@
"dev": true
},
"node_modules/vite": {
"version": "6.2.3",
"resolved": "https://registry.npmjs.org/vite/-/vite-6.2.3.tgz",
"integrity": "sha512-IzwM54g4y9JA/xAeBPNaDXiBF8Jsgl3VBQ2YQ/wOY6fyW3xMdSoltIV3Bo59DErdqdE6RxUfv8W69DvUorE4Eg==",
"version": "6.3.4",
"resolved": "https://registry.npmjs.org/vite/-/vite-6.3.4.tgz",
"integrity": "sha512-BiReIiMS2fyFqbqNT/Qqt4CVITDU9M9vE+DKcVAsB+ZV0wvTKd+3hMbkpxz1b+NmEDMegpVbisKiAZOnvO92Sw==",
"dev": true,
"license": "MIT",
"dependencies": {
"esbuild": "^0.25.0",
"fdir": "^6.4.4",
"picomatch": "^4.0.2",
"postcss": "^8.5.3",
"rollup": "^4.30.1"
"rollup": "^4.34.9",
"tinyglobby": "^0.2.13"
},
"bin": {
"vite": "bin/vite.js"
@@ -3672,6 +3720,34 @@
"vite": "^5.4.11 || ^6.0.0"
}
},
"node_modules/vite/node_modules/fdir": {
"version": "6.4.4",
"resolved": "https://registry.npmjs.org/fdir/-/fdir-6.4.4.tgz",
"integrity": "sha512-1NZP+GK4GfuAv3PqKvxQRDMjdSRZjnkq7KfhlNrCNNlZ0ygQFpebfrnfnq/W7fpUnAv9aGWmY1zKx7FYL3gwhg==",
"dev": true,
"license": "MIT",
"peerDependencies": {
"picomatch": "^3 || ^4"
},
"peerDependenciesMeta": {
"picomatch": {
"optional": true
}
}
},
"node_modules/vite/node_modules/picomatch": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.2.tgz",
"integrity": "sha512-M7BAV6Rlcy5u+m6oPhAPFgJTzAioX/6B0DxyvDlo9l8+T3nLKbrczg2WLUyzd45L8RqfUMyGPzekbMvX2Ldkwg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=12"
},
"funding": {
"url": "https://github.com/sponsors/jonschlinkert"
}
},
"node_modules/vitest": {
"version": "3.0.9",
"resolved": "https://registry.npmjs.org/vitest/-/vitest-3.0.9.tgz",

View File

@@ -33,7 +33,7 @@
"eslint-plugin-vue": "^9.23.0",
"jsdom": "^24.1.0",
"prettier": "^3.2.5",
"vite": "^6.2.3",
"vite": "^6.3.4",
"vite-plugin-singlefile": "^2.2.0",
"vitest": "^3.0.9"
}