12 Commits

14 changed files with 1050 additions and 268 deletions

55
.github/workflows/regtest.yml vendored Normal file
View File

@@ -0,0 +1,55 @@
# Samples are private; only core devs can access them.
name: Regression Test
on:
push:
branches: [main]
jobs:
regtest:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v5
with:
path: project
- name: Commit Message
id: commit_message
working-directory: ./project
run: |
COMMIT_MSG=$(git log -1 --pretty=%B | head -n 1)
echo "message=$(date +'%Y%m%d')-$(git rev-parse --short HEAD)-${COMMIT_MSG}" >> $GITHUB_OUTPUT
- name: Build
working-directory: ./project
run: |
mkdir build
cd build
cmake ../pycdc
cmake --build . --config Debug
cmake --install .
- name: Checkout regtest repository
uses: actions/checkout@v5
with:
repository: Lil-Ran/armorshot-regtest
token: ${{ secrets.REGTEST_PAT }}
path: regtest
fetch-depth: 1
lfs: true
submodules: true
- name: Run shot.py
run: |
touch ./regtest/.git/.no1shot
pip install pycryptodome
python3 ./project/oneshot/shot.py ./regtest --no-banner >/dev/null 2>&1
- name: Commit and push changes
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4
with:
cwd: ./regtest
add: .
default_author: github_actions
message: ${{ steps.commit_message.outputs.message }}
commit: "--allow-empty"

View File

@@ -24,12 +24,12 @@ You don't need to execute the encrypted script. We decrypt them using the same a
### Universal
Currently we are trying to support Pyarmor 8.0 to 9.1.x (latest), Python 3.7 - 3.13, on all operating systems, with obfuscating options as many as possible. (However, we only have limited tests.)
Currently we are trying to support Pyarmor 8.0 to 9.1.9, Python 3.7 - 3.13, on all operating systems, with obfuscating options as many as possible. (However, we only have limited tests.)
You can run this tool in any environment; it does not need to match the environment of the obfuscated scripts or their runtime.
> [!NOTE]
>
>
> If the data starts with `PY` followed by six digits, it is supported. Otherwise, if it starts with `PYARMOR`, it is generated by Pyarmor 7 or earlier, and is not supported.
### Easy to use
@@ -38,7 +38,7 @@ The only thing you need to do is specifying where your obfuscated scripts are. T
## Build
``` bash
```bash
mkdir build
cd build
cmake ../pycdc
@@ -50,7 +50,7 @@ You can also download prebuilt binary files on [releases page](https://github.co
## Usage
``` bash
```bash
python /path/to/oneshot/shot.py /path/to/scripts
```
@@ -73,10 +73,11 @@ Feel free to open an issue if you have any questions, suggestions, or problems.
## Todo (PR Welcome!)
- [ ] Multi-platform pyarmor_runtime executable
- [ ] Support more obfuscating options
- [ ] Regenerate pyc for other backends
- [ ] Documentation (Do not accept PR about this)
- [ ] Regenerate pyc for other backend decompilers (discussion in [GH-24](https://github.com/Lil-House/Pyarmor-Static-Unpack-1shot/issues/24), [GH-30](https://github.com/Lil-House/Pyarmor-Static-Unpack-1shot/issues/30))
- [ ] BCC Mode native part analysis tool
- [ ] Verify support for different obfuscating options
- [ ] Verify support for pyarmor_runtime executable on different platforms
## For CTF Challenge Makers

View File

@@ -9,10 +9,13 @@ def ascii_ratio(data: bytes) -> float:
def source_as_file(file_path: str) -> Union[List[bytes], None]:
try:
with open(file_path, 'r') as f:
co = compile(f.read(), '<str>', 'exec')
data = [i for i in co.co_consts if type(i) is bytes
and i.startswith(b'PY00') and len(i) > 64]
with open(file_path, "r") as f:
co = compile(f.read(), "<str>", "exec")
data = [
i
for i in co.co_consts
if type(i) is bytes and i.startswith(b"PY00") and len(i) > 64
]
return data
except:
return None
@@ -21,12 +24,19 @@ def source_as_file(file_path: str) -> Union[List[bytes], None]:
def source_as_lines(file_path: str) -> Union[List[bytes], None]:
data = []
try:
with open(file_path, 'r') as f:
with open(file_path, "r") as f:
for line in f:
try:
co = compile(line, '<str>', 'exec')
data.extend([i for i in co.co_consts if type(i) is bytes
and i.startswith(b'PY00') and len(i) > 64])
co = compile(line, "<str>", "exec")
data.extend(
[
i
for i in co.co_consts
if type(i) is bytes
and i.startswith(b"PY00")
and len(i) > 64
]
)
except:
# ignore not compilable lines
pass
@@ -39,14 +49,14 @@ def find_data_from_bytes(data: bytes, max_count=-1) -> List[bytes]:
result = []
idx = 0
while len(result) != max_count:
idx = data.find(b'PY00')
idx = data.find(b"PY00")
if idx == -1:
break
data = data[idx:]
if len(data) < 64:
break
header_len = int.from_bytes(data[28:32], 'little')
body_len = int.from_bytes(data[32:36], 'little')
header_len = int.from_bytes(data[28:32], "little")
body_len = int.from_bytes(data[32:36], "little")
if header_len > 256 or body_len > 0xFFFFF or header_len + body_len > len(data):
# compressed or a coincidental match, skip
data = data[5:]
@@ -55,16 +65,20 @@ def find_data_from_bytes(data: bytes, max_count=-1) -> List[bytes]:
complete_object_length = header_len + body_len
# maybe followed by data for other Python versions or another part of BCC
next_segment_offset = int.from_bytes(data[56:60], 'little')
next_segment_offset = int.from_bytes(data[56:60], "little")
data_next = data[next_segment_offset:]
while next_segment_offset != 0 and data_next.startswith(b'PY00') and len(data_next) >= 64:
header_len = int.from_bytes(data_next[28:32], 'little')
body_len = int.from_bytes(data_next[32:36], 'little')
while (
next_segment_offset != 0
and data_next.startswith(b"PY00")
and len(data_next) >= 64
):
header_len = int.from_bytes(data_next[28:32], "little")
body_len = int.from_bytes(data_next[32:36], "little")
complete_object_length = next_segment_offset + header_len + body_len
if int.from_bytes(data_next[56:60], 'little') == 0:
if int.from_bytes(data_next[56:60], "little") == 0:
break
next_segment_offset += int.from_bytes(data_next[56:60], 'little')
next_segment_offset += int.from_bytes(data_next[56:60], "little")
data_next = data[next_segment_offset:]
result.append(data[:complete_object_length])
@@ -72,51 +86,60 @@ def find_data_from_bytes(data: bytes, max_count=-1) -> List[bytes]:
return result
def nuitka_package(head: bytes, relative_path: str) -> Union[List[Tuple[str, bytes]], None]:
first_occurrence = head.find(b'PY00')
def nuitka_package(
head: bytes, relative_path: str
) -> Union[List[Tuple[str, bytes]], None]:
first_occurrence = head.find(b"PY00")
if first_occurrence == -1:
return None
last_dot_bytecode = head.rfind(b'.bytecode\x00', 0, first_occurrence)
last_dot_bytecode = head.rfind(b".bytecode\x00", 0, first_occurrence)
if last_dot_bytecode == -1:
return None
length = int.from_bytes(
head[last_dot_bytecode-4:last_dot_bytecode], 'little')
length = int.from_bytes(head[last_dot_bytecode - 4 : last_dot_bytecode], "little")
end = last_dot_bytecode + length
cur = last_dot_bytecode
result = []
while cur < end:
module_name_len = head.find(b'\x00', cur, end) - cur
module_name = head[cur:cur + module_name_len].decode('utf-8')
module_name_len = head.find(b"\x00", cur, end) - cur
module_name = head[cur : cur + module_name_len].decode("utf-8")
cur += module_name_len + 1
module_len = int.from_bytes(head[cur:cur + 4], 'little')
module_len = int.from_bytes(head[cur : cur + 4], "little")
cur += 4
module_data = find_data_from_bytes(head[cur:cur + module_len], 1)
module_data = find_data_from_bytes(head[cur : cur + module_len], 1)
if module_data:
result.append((os.path.join(relative_path.rstrip(
'/\\') + '.1shot.ext', module_name), module_data[0]))
result.append(
(
os.path.join(
relative_path.rstrip("/\\") + ".1shot.ext", module_name
),
module_data[0],
)
)
cur += module_len
if result:
logger = logging.getLogger('detect')
logger.info(f'Found data in Nuitka package: {relative_path}')
logger = logging.getLogger("detect")
logger.info(f"Found data in Nuitka package: {relative_path}")
return result
return None
def detect_process(file_path: str, relative_path: str) -> Union[List[Tuple[str, bytes]], None]:
'''
def detect_process(
file_path: str, relative_path: str
) -> Union[List[Tuple[str, bytes]], None]:
"""
Returns a list of (relative_path, bytes_raw) tuples, or None.
Do not raise exceptions.
'''
logger = logging.getLogger('detect')
"""
logger = logging.getLogger("detect")
try:
with open(file_path, 'rb') as f:
with open(file_path, "rb") as f:
head = f.read(16 * 1024 * 1024)
except:
logger.error(f'Failed to read file: {relative_path}')
logger.error(f"Failed to read file: {relative_path}")
return None
if b'__pyarmor__' not in head:
if b"__pyarmor__" not in head:
# no need to dig deeper
return None
@@ -134,16 +157,16 @@ def detect_process(file_path: str, relative_path: str) -> Union[List[Tuple[str,
if result_len == 0:
return None
elif result_len == 1:
logger.info(f'Found data in source: {relative_path}')
logger.info(f"Found data in source: {relative_path}")
return [(relative_path, result[0])]
else:
logger.info(f'Found data in source: {relative_path}')
return [(f'{relative_path}__{i}', result[i]) for i in range(len(result))]
logger.info(f"Found data in source: {relative_path}")
return [(f"{relative_path}__{i}", result[i]) for i in range(len(result))]
# binary file
# ignore data after 16MB, before we have a reason to read more
if b'Error, corrupted constants object' in head:
if b"Error, corrupted constants object" in head:
# an interesting special case: packer put armored data in a Nuitka package
# we can know the exact module names, instead of adding boring __0, __1, ...
return nuitka_package(head, relative_path)
@@ -153,8 +176,8 @@ def detect_process(file_path: str, relative_path: str) -> Union[List[Tuple[str,
if result_len == 0:
return None
elif result_len == 1:
logger.info(f'Found data in binary: {relative_path}')
logger.info(f"Found data in binary: {relative_path}")
return [(relative_path, result[0])]
else:
logger.info(f'Found data in binary: {relative_path}')
return [(f'{relative_path}__{i}', result[i]) for i in range(len(result))]
logger.info(f"Found data in binary: {relative_path}")
return [(f"{relative_path}__{i}", result[i]) for i in range(len(result))]

View File

@@ -1,7 +1,10 @@
import hashlib
import logging
from util import dword, bytes_sub
GLOBAL_CERT = bytes.fromhex('''
GLOBAL_CERT = bytes.fromhex("""
30 82 01 0a 02 82 01 01 00 bf 65 30 f3 bd 67 e7
a6 9d f8 db 18 b2 b9 c1 c0 5f fe fb e5 4b 91 df
6f 38 da 51 cc ea c4 d3 04 bd 95 27 86 c1 13 ca
@@ -19,13 +22,16 @@ af 09 fb 04 54 a9 ea c0 c1 e9 32 6c 77 92 7f 9f
4b 2c 1a 78 85 7c bc 2c d0 d7 83 77 5f 92 d5 db
59 10 96 53 2e 5d c7 42 12 b8 61 cb 2c 5f 46 14
9e 93 b0 53 21 a2 74 34 2d 02 03 01 00 01
''')
""")
logger = logging.getLogger("runtime")
class RuntimeInfo:
def __init__(self, file_path: str) -> None:
self.file_path = file_path
if file_path.endswith('.pyd'):
if file_path.endswith(".pyd"):
self.extract_info_win64()
else:
# TODO: implement for other platforms
@@ -35,62 +41,80 @@ class RuntimeInfo:
self.runtime_aes_key = self.calc_aes_key()
def __str__(self) -> str:
trial = self.serial_number == '000000'
product = ''
trial = self.serial_number == "000000"
product = ""
for c in self.part_3[2:]:
if 32 <= c <= 126:
product += chr(c)
else:
break
return f'''\
return f"""\
========================
Pyarmor Runtime ({'Trial' if trial else self.serial_number}) Information:
Pyarmor Runtime ({"Trial" if trial else self.serial_number}) Information:
Product: {product}
AES key: {self.runtime_aes_key.hex()}
Mix string AES nonce: {self.mix_str_aes_nonce().hex()}
========================'''
========================"""
def __repr__(self) -> str:
return f'RuntimeInfo(part_1={self.part_1}, part_2={self.part_2}, part_3={self.part_3})'
return f"RuntimeInfo(part_1={self.part_1}, part_2={self.part_2}, part_3={self.part_3})"
def extract_info_win64(self) -> None:
'''
"""
Try to find useful information from `pyarmor_runtime.pyd` file,
and store all three parts in the object.
'''
with open(self.file_path, 'rb') as f:
"""
with open(self.file_path, "rb") as f:
data = f.read(16 * 1024 * 1024)
cur = data.index(b'pyarmor-vax')
cur = data.index(b"pyarmor-vax")
if data[cur+11:cur+18] == b'\x00' * 7:
raise ValueError(f'{self.file_path} is a runtime template')
if data[cur + 11 : cur + 18] == b"\x00" * 7:
raise ValueError(f"{self.file_path} is a runtime template")
self.part_1 = data[cur:cur+20]
# Align with pyd file and executable address:
# In .pyd files b"pyarmor-vax" locates at 0x???2C
# But not .so
data = bytearray(bytes_sub(data, cur - 0x2C, 0x800))
cur += 36
part_2_offset = int.from_bytes(data[cur:cur+4], 'little')
part_2_len = int.from_bytes(data[cur+4:cur+8], 'little')
part_3_offset = int.from_bytes(data[cur+8:cur+12], 'little')
cur += 16
self.part_2 = data[cur+part_2_offset:cur+part_2_offset+part_2_len]
if data[0x5C] & 1 != 0:
logger.error(
'External key file ".pyarmor.ikey" is not supported yet, but it will be supported once we get a sample (like this one). Please open an issue on https://github.com/Lil-House/Pyarmor-Static-Unpack-1shot/issues to make this tool stronger.'
)
raise NotImplementedError(f'{self.file_path} uses ".pyarmor.ikey"')
cur += part_3_offset
part_3_len = int.from_bytes(data[cur+4:cur+8], 'little')
cur += 32
self.part_3 = data[cur:cur+part_3_len]
if dword(data, 0x4C) != 0:
xor_flag = 0x60 + dword(data, 0x48)
xor_target = 0x60 + dword(data, 0x50)
xor_length = int.from_bytes(data[xor_flag + 1 : xor_flag + 4], "little")
if data[xor_flag] == 1:
for i in range(xor_length):
# MUT data
data[xor_target + i] ^= data[xor_flag + 4 + i]
self.part_1 = bytes_sub(data, 0x2C, 20)
part_2_offset = dword(data, 0x50)
part_2_len = dword(data, 0x54)
self.part_2 = bytes_sub(data, 0x60 + part_2_offset, part_2_len)
var_a1 = 0x60 + dword(data, 0x58)
part_3_len = dword(data, var_a1 + 4)
self.part_3 = bytes_sub(data, var_a1 + 0x20, part_3_len)
def calc_aes_key(self) -> bytes:
return hashlib.md5(self.part_1 + self.part_2 + self.part_3 + GLOBAL_CERT).digest()
return hashlib.md5(
self.part_1 + self.part_2 + self.part_3 + GLOBAL_CERT
).digest()
def mix_str_aes_nonce(self) -> bytes:
return self.part_3[:12]
@classmethod
def default(cls) -> 'RuntimeInfo':
def default(cls) -> "RuntimeInfo":
instance = cls.__new__(cls)
instance.file_path = '<default>'
instance.part_1 = b'pyarmor-vax-000000\x00\x00'
instance.part_2 = bytes.fromhex('''
instance.file_path = "<default>"
instance.part_1 = b"pyarmor-vax-000000\x00\x00"
instance.part_2 = bytes.fromhex("""
30 81 89 02 81 81 00 A8 ED 64 F4 83 49 13 FC 0F
86 6F 00 5A 8F E4 91 AA ED 1C EA D4 BB 4C 3F 7C
24 21 01 A8 D0 7D 93 F4 BF E7 FB 8C 06 57 88 6A
@@ -100,23 +124,24 @@ class RuntimeInfo:
BA 52 C5 B6 40 F6 AD AB BC D5 CF 5B 40 CB 8D 13
C4 28 B8 90 93 C4 76 01 09 8E 05 1E 61 FA 90 4C
BF 67 D4 A7 D5 82 C1 02 03 01 00 01
''')
instance.part_3 = bytes.fromhex('''
""")
instance.part_3 = bytes.fromhex("""
69 2E 6E 6F 6E 2D 70 72 6F 66 69 74 73 E7 5A 41
9B DC 77 53 CA 1D E7 04 EB EF DA C9 A3 6C 0F 7B
00 00 00 01 00 00 00 00 00 00 00 00 00 00 00 00
00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00
00 00 00 00 00 00 00
''')
instance.serial_number = '000000'
""")
instance.serial_number = "000000"
instance.runtime_aes_key = instance.calc_aes_key()
return instance
if __name__ == '__main__':
if __name__ == "__main__":
import sys
if len(sys.argv) < 2:
print('Usage: python runtime.py path/to/pyarmor_runtime[.pyd|.so|.dylib]')
print("Usage: python runtime.py path/to/pyarmor_runtime[.pyd|.so|.dylib]")
exit(1)
for i in sys.argv[1:]:
runtime = RuntimeInfo(i)

View File

@@ -5,14 +5,22 @@ import os
import asyncio
import traceback
import platform
import locale
from typing import Dict, List, Tuple
try:
from colorama import init, Fore, Style
from colorama import init, Fore, Style # type: ignore
except ImportError:
def init(**kwargs): pass
class Fore: CYAN = RED = YELLOW = GREEN = ''
class Style: RESET_ALL = ''
def init(**kwargs):
pass
class Fore:
CYAN = RED = YELLOW = GREEN = ""
class Style:
RESET_ALL = ""
from detect import detect_process
from runtime import RuntimeInfo
@@ -27,154 +35,273 @@ def general_aes_ctr_decrypt(data: bytes, key: bytes, nonce: bytes) -> bytes:
return cipher.decrypt(data)
async def decrypt_file_async(exe_path, seq_file_path, path, args):
logger = logging.getLogger('shot')
def decode_output(data: bytes) -> str:
if not data:
return ""
# 1) try chardet if available to guess encoding
try:
# Run without timeout
import chardet # type: ignore
res = chardet.detect(data)
enc = res.get("encoding")
if enc:
return data.decode(enc, errors="replace")
except Exception:
pass
# 2) try common encodings in a reasonable order
attempts = [
"utf-8",
"utf-8-sig",
locale.getpreferredencoding(False) or None,
"cp936",
"latin-1",
]
for enc in attempts:
if not enc:
continue
try:
return data.decode(enc)
except Exception:
continue
try:
return data.decode("latin-1", errors="replace")
except Exception:
return ""
async def run_pycdc_async(
exe_path: str,
seq_file_path: str,
path_for_log: str,
*,
unit_buf: bool = False,
no_banner: bool = False,
show_all: bool = False,
show_err_opcode: bool = False,
show_warn_stack: bool = False,
):
logger = logging.getLogger("shot")
try:
options = []
if unit_buf:
options.append("--unitbuf")
if no_banner:
options.append("--no-banner")
process = await asyncio.create_subprocess_exec(
exe_path,
*options,
seq_file_path,
stdout=asyncio.subprocess.PIPE,
stderr=asyncio.subprocess.PIPE,
)
stdout, stderr = await process.communicate()
stdout_lines = stdout.decode('latin-1').splitlines()
stderr_lines = stderr.decode('latin-1').splitlines()
stdout_lines = decode_output(stdout).splitlines()
stderr_lines = decode_output(stderr).splitlines()
for line in stdout_lines:
logger.warning(f'PYCDC: {line} ({path})')
logger.warning(f"PYCDC: {line} ({path_for_log})")
for line in stderr_lines:
if line.startswith((
'Warning: Stack history is empty',
'Warning: Stack history is not empty',
'Warning: block stack is not empty',
)):
if args.show_warn_stack or args.show_all:
logger.warning(f'PYCDC: {line} ({path})')
elif line.startswith('Unsupported opcode:'):
if args.show_err_opcode or args.show_all:
logger.error(f'PYCDC: {line} ({path})')
elif line.startswith((
'Something TERRIBLE happened',
'Unsupported argument',
'Unsupported Node type',
'Unsupported node type',
)): # annoying wont-fix errors
if args.show_all:
logger.error(f'PYCDC: {line} ({path})')
if not unit_buf and line.startswith("Access violation caught"):
# retry with --unitbuf
await run_pycdc_async(
exe_path,
seq_file_path,
path_for_log,
unit_buf=True,
no_banner=no_banner,
show_all=show_all,
show_err_opcode=show_err_opcode,
show_warn_stack=show_warn_stack,
)
# do not log anything because it will be logged in the retried call
return
if line.startswith(
(
"Warning: Stack history is empty",
"Warning: Stack history is not empty",
"Warning: block stack is not empty",
)
):
if show_warn_stack or show_all:
logger.warning(f"PYCDC: {line} ({path_for_log})")
elif line.startswith("Unsupported opcode:"):
if show_err_opcode or show_all:
logger.error(f"PYCDC: {line} ({path_for_log})")
elif line.startswith(
(
"Something TERRIBLE happened",
"Unsupported argument",
"Unsupported Node type",
"Unsupported node type",
"Access violation caught",
)
): # annoying wont-fix errors
if show_all:
logger.error(f"PYCDC: {line} ({path_for_log})")
else:
logger.error(f'PYCDC: {line} ({path})')
logger.error(f"PYCDC: {line} ({path_for_log})")
if process.returncode != 0:
logger.warning(f'{Fore.YELLOW}PYCDC returned 0x{process.returncode:x} ({path}){Style.RESET_ALL}')
logger.warning(
f"{Fore.YELLOW}PYCDC returned 0x{process.returncode:x} ({path_for_log}){Style.RESET_ALL}"
)
except Exception as e:
error_details = traceback.format_exc()
logger.error(f'{Fore.RED}Exception: {e} ({path}){Style.RESET_ALL}')
logger.error(f'{Fore.RED}Error details: {error_details}{Style.RESET_ALL}')
logger.error(f"{Fore.RED}Exception: {e} ({path_for_log}){Style.RESET_ALL}")
logger.error(f"{Fore.RED}Error details: {error_details}{Style.RESET_ALL}")
async def decrypt_process_async(runtimes: Dict[str, RuntimeInfo], sequences: List[Tuple[str, bytes]], args):
logger = logging.getLogger('shot')
async def decrypt_process_async(
runtimes: Dict[str, RuntimeInfo], sequences: List[Tuple[str, bytes]], args
):
logger = logging.getLogger("shot")
output_dir: str = args.output_dir or args.directory
# Create a semaphore to limit concurrent processes
semaphore = asyncio.Semaphore(args.concurrent) # Use the concurrent argument
# Get the appropriate executable for the current platform
exe_path = get_platform_executable(args)
exe_path = get_platform_executable(args.executable)
semaphore = asyncio.Semaphore(args.concurrent)
async def process_file(path, data):
async def process_file(relative_path, data):
async with semaphore:
try:
serial_number = data[2:8].decode('utf-8')
serial_number = data[2:8].decode("utf-8")
runtime = runtimes[serial_number]
logger.info(f'{Fore.CYAN}Decrypting: {serial_number} ({path}){Style.RESET_ALL}')
logger.info(
f"{Fore.CYAN}Decrypting: {serial_number} ({relative_path}){Style.RESET_ALL}"
)
dest_path = os.path.join(output_dir, path) if output_dir else path
dest_path = (
os.path.join(output_dir, relative_path)
if output_dir
else os.path.abspath(relative_path) # resolve with working dir
) # abs or rel; must have a dirname and must not end with a slash
dest_dir = os.path.dirname(dest_path)
if not os.path.exists(dest_dir):
os.makedirs(dest_dir)
if args.export_raw_data:
with open(dest_path + '.1shot.raw', 'wb') as f:
with open(dest_path + ".1shot.raw", "wb") as f:
f.write(data)
# Check BCC
if int.from_bytes(data[20:24], 'little') == 9:
cipher_text_offset = int.from_bytes(data[28:32], 'little')
cipher_text_length = int.from_bytes(data[32:36], 'little')
# Check BCC; mutates "data"
if int.from_bytes(data[20:24], "little") == 9:
cipher_text_offset = int.from_bytes(data[28:32], "little")
cipher_text_length = int.from_bytes(data[32:36], "little")
nonce = data[36:40] + data[44:52]
bcc_aes_decrypted = general_aes_ctr_decrypt(
data[cipher_text_offset:cipher_text_offset+cipher_text_length], runtime.runtime_aes_key, nonce)
data = data[int.from_bytes(data[56:60], 'little'):]
data[
cipher_text_offset : cipher_text_offset + cipher_text_length
],
runtime.runtime_aes_key,
nonce,
)
data = data[int.from_bytes(data[56:60], "little") :]
bcc_architecture_mapping = {
0x2001: 'win-x64',
0x2003: 'linux-x64',
0x2001: "win-x64",
0x2003: "linux-x64",
}
while True:
if len(bcc_aes_decrypted) < 16:
break
bcc_segment_offset = int.from_bytes(bcc_aes_decrypted[0:4], 'little')
bcc_segment_length = int.from_bytes(bcc_aes_decrypted[4:8], 'little')
bcc_architecture_id = int.from_bytes(bcc_aes_decrypted[8:12], 'little')
bcc_next_segment_offset = int.from_bytes(bcc_aes_decrypted[12:16], 'little')
bcc_architecture = bcc_architecture_mapping.get(bcc_architecture_id, f'0x{bcc_architecture_id:x}')
bcc_file_path = f'{dest_path}.1shot.bcc.{bcc_architecture}.so'
with open(bcc_file_path, 'wb') as f:
f.write(bcc_aes_decrypted[bcc_segment_offset:bcc_segment_offset+bcc_segment_length])
logger.info(f'{Fore.GREEN}Extracted BCC mode native part: {bcc_file_path}{Style.RESET_ALL}')
bcc_segment_offset = int.from_bytes(
bcc_aes_decrypted[0:4], "little"
)
bcc_segment_length = int.from_bytes(
bcc_aes_decrypted[4:8], "little"
)
bcc_architecture_id = int.from_bytes(
bcc_aes_decrypted[8:12], "little"
)
bcc_next_segment_offset = int.from_bytes(
bcc_aes_decrypted[12:16], "little"
)
bcc_architecture = bcc_architecture_mapping.get(
bcc_architecture_id, f"0x{bcc_architecture_id:x}"
)
bcc_file_path = f"{dest_path}.1shot.bcc.{bcc_architecture}.so"
with open(bcc_file_path, "wb") as f:
f.write(
bcc_aes_decrypted[
bcc_segment_offset : bcc_segment_offset
+ bcc_segment_length
]
)
logger.info(
f"{Fore.GREEN}Extracted BCC mode native part: {bcc_file_path}{Style.RESET_ALL}"
)
if bcc_next_segment_offset == 0:
break
bcc_aes_decrypted = bcc_aes_decrypted[bcc_next_segment_offset:]
cipher_text_offset = int.from_bytes(data[28:32], 'little')
cipher_text_length = int.from_bytes(data[32:36], 'little')
cipher_text_offset = int.from_bytes(data[28:32], "little")
cipher_text_length = int.from_bytes(data[32:36], "little")
nonce = data[36:40] + data[44:52]
seq_file_path = dest_path + '.1shot.seq'
with open(seq_file_path, 'wb') as f:
f.write(b'\xa1' + runtime.runtime_aes_key)
f.write(b'\xa2' + runtime.mix_str_aes_nonce())
f.write(b'\xf0\xff')
seq_file_path = dest_path + ".1shot.seq"
with open(seq_file_path, "wb") as f:
f.write(b"\xa1" + runtime.runtime_aes_key)
f.write(b"\xa2" + runtime.mix_str_aes_nonce())
f.write(b"\xf0\xff")
f.write(data[:cipher_text_offset])
f.write(general_aes_ctr_decrypt(
data[cipher_text_offset:cipher_text_offset+cipher_text_length], runtime.runtime_aes_key, nonce))
f.write(data[cipher_text_offset+cipher_text_length:])
f.write(
general_aes_ctr_decrypt(
data[
cipher_text_offset : cipher_text_offset
+ cipher_text_length
],
runtime.runtime_aes_key,
nonce,
)
)
f.write(data[cipher_text_offset + cipher_text_length :])
# Run without timeout
await decrypt_file_async(exe_path, seq_file_path, path, args)
await run_pycdc_async(
exe_path,
seq_file_path,
relative_path,
no_banner=args.no_banner,
show_all=args.show_all,
show_err_opcode=args.show_err_opcode,
show_warn_stack=args.show_warn_stack,
)
except Exception as e:
error_details = traceback.format_exc()
logger.error(f'{Fore.RED}Decrypt failed: {e} ({path}){Style.RESET_ALL}')
logger.error(f'{Fore.RED}Error details: {error_details}{Style.RESET_ALL}')
# Create tasks for all files
logger.error(
f"{Fore.RED}Decrypt failed: {e} ({relative_path}){Style.RESET_ALL}"
)
logger.error(
f"{Fore.RED}Error details: {error_details}{Style.RESET_ALL}"
)
tasks = [process_file(path, data) for path, data in sequences]
# Run all tasks concurrently
await asyncio.gather(*tasks)
def decrypt_process(runtimes: Dict[str, RuntimeInfo], sequences: List[Tuple[str, bytes]], args):
def decrypt_process(
runtimes: Dict[str, RuntimeInfo], sequences: List[Tuple[str, bytes]], args
):
asyncio.run(decrypt_process_async(runtimes, sequences, args))
def get_platform_executable(args) -> str:
"""
Get the appropriate executable for the current platform
"""
logger = logging.getLogger('shot')
def get_platform_executable(specified: str) -> str:
logger = logging.getLogger("shot")
# If a specific executable is provided, use it
if args.executable:
if os.path.exists(args.executable):
logger.info(f'{Fore.GREEN}Using specified executable: {args.executable}{Style.RESET_ALL}')
return args.executable
if specified:
if os.path.exists(specified):
logger.info(
f"{Fore.GREEN}Using specified executable: {specified}{Style.RESET_ALL}"
)
return specified
else:
logger.warning(f'{Fore.YELLOW}Specified executable not found: {args.executable}{Style.RESET_ALL}')
logger.warning(
f"{Fore.YELLOW}Specified executable not found: {specified}{Style.RESET_ALL}"
)
oneshot_dir = os.path.dirname(os.path.abspath(__file__))
@@ -182,90 +309,96 @@ def get_platform_executable(args) -> str:
machine = platform.machine().lower()
# Check for architecture-specific executables
arch_specific_exe = f'pyarmor-1shot-{system}-{machine}'
if system == 'windows':
arch_specific_exe += '.exe'
arch_specific_exe = f"pyarmor-1shot-{system}-{machine}"
if system == "windows":
arch_specific_exe += ".exe"
arch_exe_path = os.path.join(oneshot_dir, arch_specific_exe)
if os.path.exists(arch_exe_path):
logger.info(f'{Fore.GREEN}Using architecture-specific executable: {arch_specific_exe}{Style.RESET_ALL}')
logger.info(
f"{Fore.GREEN}Using architecture-specific executable: {arch_specific_exe}{Style.RESET_ALL}"
)
return arch_exe_path
# Allow ".elf" and ".macho" suffixes, so that they can exist in the same folder
platform_map = {
'windows': 'pyarmor-1shot.exe',
'linux': 'pyarmor-1shot',
'darwin': 'pyarmor-1shot',
"windows": ["pyarmor-1shot.exe", "pyarmor-1shot"],
"linux": ["pyarmor-1shot", "pyarmor-1shot.elf"],
"darwin": ["pyarmor-1shot", "pyarmor-1shot.macho"],
}
base_exe_name = platform_map.get(system, 'pyarmor-1shot')
# Then check for platform-specific executable
platform_exe_path = os.path.join(oneshot_dir, base_exe_name)
if os.path.exists(platform_exe_path):
logger.info(f'{Fore.GREEN}Using executable: {base_exe_name}{Style.RESET_ALL}')
return platform_exe_path
for base_exe_name in platform_map.get(system, ["pyarmor-1shot"]):
platform_exe_path = os.path.join(oneshot_dir, base_exe_name)
if os.path.exists(platform_exe_path):
logger.info(
f"{Fore.GREEN}Using executable: {base_exe_name}{Style.RESET_ALL}"
)
return platform_exe_path
# Finally, check for generic executable
generic_exe_path = os.path.join(oneshot_dir, 'pyarmor-1shot')
if os.path.exists(generic_exe_path):
logger.info(f'{Fore.GREEN}Using executable: pyarmor-1shot{Style.RESET_ALL}')
return generic_exe_path
logger.critical(f'{Fore.RED}Executable {base_exe_name} not found, please build it first or download on https://github.com/Lil-House/Pyarmor-Static-Unpack-1shot/releases {Style.RESET_ALL}')
platform_default = platform_map.get(system, ["pyarmor-1shot"])[0]
logger.critical(
f"{Fore.RED}Executable {platform_default} not found, please build it first or download on https://github.com/Lil-House/Pyarmor-Static-Unpack-1shot/releases {Style.RESET_ALL}"
)
exit(1)
def parse_args():
parser = argparse.ArgumentParser(
description='Pyarmor Static Unpack 1 Shot Entry')
parser = argparse.ArgumentParser(description="Pyarmor Static Unpack 1 Shot Entry")
parser.add_argument(
'directory',
"directory",
help='the "root" directory of obfuscated scripts',
type=str,
)
parser.add_argument(
'-r',
'--runtime',
help='path to pyarmor_runtime[.pyd|.so|.dylib]',
"-r",
"--runtime",
help="path to pyarmor_runtime[.pyd|.so|.dylib]",
type=str, # argparse.FileType('rb'),
)
parser.add_argument(
'-o',
'--output-dir',
help='save output files in another directory instead of in-place, with folder structure remain unchanged',
"-o",
"--output-dir",
help="save output files in another directory instead of in-place, with folder structure remain unchanged",
type=str,
)
parser.add_argument(
'--export-raw-data',
help='save data found in source files as-is',
action='store_true',
"--export-raw-data",
help="save data found in source files as-is",
action="store_true",
)
parser.add_argument(
'--show-all',
help='show all pycdc errors and warnings',
action='store_true',
"--show-all",
help="show all pycdc errors and warnings",
action="store_true",
)
parser.add_argument(
'--show-err-opcode',
help='show pycdc unsupported opcode errors',
action='store_true',
"--show-err-opcode",
help="show pycdc unsupported opcode errors",
action="store_true",
)
parser.add_argument(
'--show-warn-stack',
help='show pycdc stack related warnings',
action='store_true',
"--show-warn-stack",
help="show pycdc stack related warnings",
action="store_true",
)
parser.add_argument(
'--concurrent',
help='number of concurrent deobfuscation processes (default: 4)',
"--concurrent",
help="number of concurrent handling processes (default: 4)",
type=int,
default=4,
)
parser.add_argument(
'-e',
'--executable',
help='path to the pyarmor-1shot executable to use',
"-e",
"--executable",
help="path to the pyarmor-1shot executable to use",
type=str,
)
parser.add_argument(
"--no-banner",
help="do not show banner in console and output files",
action="store_true",
)
return parser.parse_args()
@@ -273,11 +406,12 @@ def main():
args = parse_args()
logging.basicConfig(
level=logging.INFO,
format='%(levelname)-8s %(asctime)-28s %(message)s',
format="%(levelname)-8s %(asctime)-28s %(message)s",
)
logger = logging.getLogger('shot')
logger = logging.getLogger("shot")
print(Fore.CYAN + r'''
if not args.no_banner:
print(rf"""{Fore.CYAN}
____ ____
( __ ) ( __ )
| |~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~| |
@@ -288,11 +422,11 @@ def main():
| | |_| \_, |\__,_|_| |_||_||_|\___/|_| |_|___/|_||_|\___/ \__| | |
| | |__/ | |
|__|~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~|__|
(____) v0.2.1 (____)
(____) v0.2.1+ (____)
For technology exchange only. Use at your own risk.
GitHub: https://github.com/Lil-House/Pyarmor-Static-Unpack-1shot
''' + Style.RESET_ALL)
{Style.RESET_ALL}""")
if args.runtime:
specified_runtime = RuntimeInfo(args.runtime)
@@ -300,71 +434,89 @@ def main():
runtimes = {specified_runtime.serial_number: specified_runtime}
else:
specified_runtime = None
runtimes = {'000000': RuntimeInfo.default()}
sequences: List[Tuple[str, bytes]] = []
runtimes = {"000000": RuntimeInfo.default()}
if args.output_dir and not os.path.exists(args.output_dir):
os.makedirs(args.output_dir)
if args.output_dir and not os.path.isdir(args.output_dir):
logger.error(f'{Fore.RED}Cannot use {repr(args.output_dir)} as output directory{Style.RESET_ALL}')
logger.error(
f"{Fore.RED}Cannot use {repr(args.output_dir)} as output directory{Style.RESET_ALL}"
)
return
# Note for path handling:
# args.output_dir: is either None or an existing directory path, can be absolute or relative
# args.directory: before calling `decrypt_process`, it is an existing directory path, can be absolute or relative
# paths in `sequences`: must be relative file paths, without trailing slashes, can exist or not
if os.path.isfile(args.directory):
single_file_path = os.path.abspath(args.directory)
args.directory = os.path.dirname(single_file_path)
relative_path = os.path.basename(single_file_path)
if specified_runtime is None:
logger.error(f'{Fore.RED}Please specify `pyarmor_runtime` file by `-r` if input is a file{Style.RESET_ALL}')
logger.error(
f"{Fore.RED}Please specify `pyarmor_runtime` file by `-r` if input is a file{Style.RESET_ALL}"
)
return
logger.info(f'{Fore.CYAN}Single file mode{Style.RESET_ALL}')
result = detect_process(args.directory, args.directory)
if result is None:
logger.error(f'{Fore.RED}No armored data found{Style.RESET_ALL}')
logger.info(f"{Fore.CYAN}Single file mode{Style.RESET_ALL}")
single_file_sequences = detect_process(single_file_path, relative_path)
if single_file_sequences is None:
logger.error(f"{Fore.RED}No armored data found{Style.RESET_ALL}")
return
sequences.extend(result)
decrypt_process(runtimes, sequences, args)
decrypt_process(runtimes, single_file_sequences, args)
return # single file mode ends here
sequences: List[Tuple[str, bytes]] = []
dir_path: str
dirs: List[str]
files: List[str]
for dir_path, dirs, files in os.walk(args.directory, followlinks=False):
if '.no1shot' in files:
logger.info(f'{Fore.YELLOW}Skipping {dir_path} because of `.no1shot`{Style.RESET_ALL}')
if ".no1shot" in files:
logger.info(
f"{Fore.YELLOW}Skipping {dir_path} because of `.no1shot`{Style.RESET_ALL}"
)
dirs.clear()
files.clear()
continue
for d in ['__pycache__', 'site-packages']:
for d in ["__pycache__", "site-packages"]:
if d in dirs:
dirs.remove(d)
for file_name in files:
if '.1shot.' in file_name:
if ".1shot." in file_name:
continue
file_path = os.path.join(dir_path, file_name)
relative_path = os.path.relpath(file_path, args.directory)
if file_name.endswith('.pyz'):
with open(file_path, 'rb') as f:
if file_name.endswith(".pyz"):
with open(file_path, "rb") as f:
head = f.read(16 * 1024 * 1024)
if b'PY00' in head \
and (not os.path.exists(file_path + '_extracted')
or len(os.listdir(file_path + '_extracted')) == 0):
if b"PY00" in head and (
not os.path.exists(file_path + "_extracted")
or len(os.listdir(file_path + "_extracted")) == 0
):
logger.error(
f'{Fore.RED}A PYZ file containing armored data is detected, but the PYZ file has not been extracted by other tools. This error is not a problem with this tool. If the folder is extracted by Pyinstxtractor, please read the output information of Pyinstxtractor carefully. ({relative_path}){Style.RESET_ALL}')
f"{Fore.RED}A PYZ file containing armored data is detected, but the PYZ file has not been extracted by other tools. This error is not a problem with this tool. If the folder is extracted by Pyinstxtractor, please read the output information of Pyinstxtractor carefully. ({relative_path}){Style.RESET_ALL}"
)
continue
# is pyarmor_runtime?
if specified_runtime is None \
and file_name.startswith('pyarmor_runtime') \
and file_name.endswith(('.pyd', '.so', '.dylib')):
if (
specified_runtime is None
and file_name.startswith("pyarmor_runtime")
and file_name.endswith((".pyd", ".so", ".dylib"))
):
try:
new_runtime = RuntimeInfo(file_path)
runtimes[new_runtime.serial_number] = new_runtime
logger.info(
f'{Fore.GREEN}Found new runtime: {new_runtime.serial_number} ({file_path}){Style.RESET_ALL}')
f"{Fore.GREEN}Found new runtime: {new_runtime.serial_number} ({file_path}){Style.RESET_ALL}"
)
print(new_runtime)
continue
except:
except Exception:
pass
result = detect_process(file_path, relative_path)
@@ -372,13 +524,13 @@ def main():
sequences.extend(result)
if not runtimes:
logger.error(f'{Fore.RED}No runtime found{Style.RESET_ALL}')
logger.error(f"{Fore.RED}No `pyarmor_runtime` file found{Style.RESET_ALL}")
return
if not sequences:
logger.error(f'{Fore.RED}No armored data found{Style.RESET_ALL}')
logger.error(f"{Fore.RED}No armored data found{Style.RESET_ALL}")
return
decrypt_process(runtimes, sequences, args)
if __name__ == '__main__':
if __name__ == "__main__":
main()

6
oneshot/util.py Normal file
View File

@@ -0,0 +1,6 @@
def dword(buffer, idx: int) -> int:
    """Return the unsigned 32-bit value stored little-endian at ``buffer[idx:idx+4]``."""
    chunk = buffer[idx : idx + 4]
    return int.from_bytes(chunk, byteorder="little")
def bytes_sub(buffer, start: int, length: int) -> bytes:
    """Return the ``length``-byte slice of ``buffer`` starting at ``start``.

    The result may be shorter than ``length`` when the buffer ends early,
    following normal Python slicing semantics.

    Note: the original annotation declared ``-> int``, which was wrong —
    a slice of a bytes-like object is itself bytes-like, not an integer.
    """
    return buffer[start : start + length]

View File

@@ -267,6 +267,11 @@ PycRef<ASTNode> BuildFromCode(PycRef<PycCode> code, PycModule* mod)
bool need_try = false;
bool variable_annotations = false;
// BEGIN ONESHOT TEMPORARY PATCH
// For Pyarmor generated `NOP; JUMP_FORWARD` sequences
bool last_is_nop = false;
// END ONESHOT PATCH
while (!source.atEof()) {
#if defined(BLOCK_DEBUG) || defined(STACK_DEBUG)
fprintf(stderr, "%-7d", pos);
@@ -333,6 +338,33 @@ PycRef<ASTNode> BuildFromCode(PycRef<PycCode> code, PycModule* mod)
}
}
// BEGIN ONESHOT TEMPORARY PATCH
// For Pyarmor generated `NOP; JUMP_FORWARD` sequences
if (last_is_nop && opcode == Pyc::JUMP_FORWARD_A) {
int offs = operand;
if (mod->verCompare(3, 10) >= 0)
offs *= sizeof(uint16_t);
// If destination is a:
// LOAD_CONST '__pyarmor_exit_N__'
// Then change JUMP_FORWARD to RETURN_VALUE
const char* code_bytes = code->code()->value();
for (int i = 0; i < 10; i += 2) {
if (pos + offs + i + 1 >= code->code()->length())
break;
int tested_opcode = Pyc::ByteToOpcode(mod->majorVer(), mod->minorVer(), code_bytes[pos + offs + i]);
if (tested_opcode == Pyc::LOAD_CONST_A) {
unsigned char tested_operand = code_bytes[pos + offs + i + 1];
auto str = code->getConst(tested_operand).try_cast<PycString>();
if (str != nullptr && str->startsWith("__pyarmor_exit_")) {
opcode = Pyc::RETURN_VALUE;
break;
}
}
}
}
// END ONESHOT PATCH
switch (opcode) {
case Pyc::BINARY_OP_A:
{
@@ -2164,7 +2196,9 @@ PycRef<ASTNode> BuildFromCode(PycRef<PycCode> code, PycModule* mod)
curblock = blocks.top();
curblock->append(prev.cast<ASTNode>());
bc_next(source, mod, opcode, operand, pos);
// BEGIN ONESHOT TEMPORARY PATCH
// bc_next(source, mod, opcode, operand, pos);
// END ONESHOT PATCH
}
}
break;
@@ -3002,6 +3036,10 @@ PycRef<ASTNode> BuildFromCode(PycRef<PycCode> code, PycModule* mod)
|| (curblock->blktype() == ASTBlock::BLK_IF)
|| (curblock->blktype() == ASTBlock::BLK_ELIF) )
&& (curblock->end() == pos);
// BEGIN ONESHOT TEMPORARY PATCH
last_is_nop = (opcode == Pyc::NOP);
// END ONESHOT PATCH
}
if (stack_hist.size()) {

View File

@@ -66,6 +66,7 @@ add_executable(pyarmor-1shot
bytes/python_3_11.cpp
bytes/python_3_12.cpp
bytes/python_3_13.cpp
bytes/python_3_14.cpp
)
install(TARGETS pyarmor-1shot

View File

@@ -39,6 +39,7 @@ DECLARE_PYTHON(3, 10)
DECLARE_PYTHON(3, 11)
DECLARE_PYTHON(3, 12)
DECLARE_PYTHON(3, 13)
DECLARE_PYTHON(3, 14)
const char* Pyc::OpcodeName(int opcode)
{
@@ -109,6 +110,7 @@ int Pyc::ByteToOpcode(int maj, int min, int opcode)
case 11: return python_3_11_map(opcode);
case 12: return python_3_12_map(opcode);
case 13: return python_3_13_map(opcode);
case 14: return python_3_14_map(opcode);
}
break;
}

View File

@@ -124,6 +124,104 @@ OPCODE(FORMAT_WITH_SPEC) // Python 3.13 ->
OPCODE(MAKE_FUNCTION) // Python 3.13 ->
OPCODE(TO_BOOL) // Python 3.13 ->
OPCODE(BUILD_TEMPLATE) // Python 3.14 ->
OPCODE(BINARY_OP_INPLACE_ADD_UNICODE)
OPCODE(NOT_TAKEN)
OPCODE(POP_ITER)
OPCODE(BUILD_INTERPOLATION)
OPCODE(LOAD_COMMON_CONSTANT)
OPCODE(LOAD_FAST_BORROW)
OPCODE(LOAD_FAST_BORROW_LOAD_FAST_BORROW)
OPCODE(LOAD_SPECIAL)
OPCODE(BINARY_OP_ADD_FLOAT)
OPCODE(BINARY_OP_ADD_INT)
OPCODE(BINARY_OP_ADD_UNICODE)
OPCODE(BINARY_OP_EXTEND)
OPCODE(BINARY_OP_MULTIPLY_FLOAT)
OPCODE(BINARY_OP_MULTIPLY_INT)
OPCODE(BINARY_OP_SUBSCR_DICT)
OPCODE(BINARY_OP_SUBSCR_GETITEM)
OPCODE(BINARY_OP_SUBSCR_LIST_INT)
OPCODE(BINARY_OP_SUBSCR_LIST_SLICE)
OPCODE(BINARY_OP_SUBSCR_STR_INT)
OPCODE(BINARY_OP_SUBSCR_TUPLE_INT)
OPCODE(BINARY_OP_SUBTRACT_FLOAT)
OPCODE(BINARY_OP_SUBTRACT_INT)
OPCODE(CALL_ALLOC_AND_ENTER_INIT)
OPCODE(CALL_BOUND_METHOD_EXACT_ARGS)
OPCODE(CALL_BOUND_METHOD_GENERAL)
OPCODE(CALL_BUILTIN_CLASS)
OPCODE(CALL_BUILTIN_FAST)
OPCODE(CALL_BUILTIN_FAST_WITH_KEYWORDS)
OPCODE(CALL_BUILTIN_O)
OPCODE(CALL_ISINSTANCE)
OPCODE(CALL_KW_BOUND_METHOD)
OPCODE(CALL_KW_NON_PY)
OPCODE(CALL_KW_PY)
OPCODE(CALL_LEN)
OPCODE(CALL_LIST_APPEND)
OPCODE(CALL_METHOD_DESCRIPTOR_FAST)
OPCODE(CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS)
OPCODE(CALL_METHOD_DESCRIPTOR_NOARGS)
OPCODE(CALL_METHOD_DESCRIPTOR_O)
OPCODE(CALL_NON_PY_GENERAL)
OPCODE(CALL_PY_EXACT_ARGS)
OPCODE(CALL_PY_GENERAL)
OPCODE(CALL_STR_1)
OPCODE(CALL_TUPLE_1)
OPCODE(CALL_TYPE_1)
OPCODE(COMPARE_OP_FLOAT)
OPCODE(COMPARE_OP_INT)
OPCODE(COMPARE_OP_STR)
OPCODE(CONTAINS_OP_DICT)
OPCODE(CONTAINS_OP_SET)
OPCODE(FOR_ITER_GEN)
OPCODE(FOR_ITER_LIST)
OPCODE(FOR_ITER_RANGE)
OPCODE(FOR_ITER_TUPLE)
OPCODE(JUMP_BACKWARD_JIT)
OPCODE(JUMP_BACKWARD_NO_JIT)
OPCODE(LOAD_ATTR_CLASS)
OPCODE(LOAD_ATTR_CLASS_WITH_METACLASS_CHECK)
OPCODE(LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN)
OPCODE(LOAD_ATTR_INSTANCE_VALUE)
OPCODE(LOAD_ATTR_METHOD_LAZY_DICT)
OPCODE(LOAD_ATTR_METHOD_NO_DICT)
OPCODE(LOAD_ATTR_METHOD_WITH_VALUES)
OPCODE(LOAD_ATTR_MODULE)
OPCODE(LOAD_ATTR_NONDESCRIPTOR_NO_DICT)
OPCODE(LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES)
OPCODE(LOAD_ATTR_PROPERTY)
OPCODE(LOAD_ATTR_SLOT)
OPCODE(LOAD_ATTR_WITH_HINT)
OPCODE(LOAD_CONST_IMMORTAL)
OPCODE(LOAD_CONST_MORTAL)
OPCODE(LOAD_GLOBAL_BUILTIN)
OPCODE(LOAD_GLOBAL_MODULE)
OPCODE(LOAD_SUPER_ATTR_ATTR)
OPCODE(LOAD_SUPER_ATTR_METHOD)
OPCODE(RESUME_CHECK)
OPCODE(SEND_GEN)
OPCODE(STORE_ATTR_INSTANCE_VALUE)
OPCODE(STORE_ATTR_SLOT)
OPCODE(STORE_ATTR_WITH_HINT)
OPCODE(STORE_SUBSCR_DICT)
OPCODE(STORE_SUBSCR_LIST_INT)
OPCODE(TO_BOOL_ALWAYS_TRUE)
OPCODE(TO_BOOL_BOOL)
OPCODE(TO_BOOL_INT)
OPCODE(TO_BOOL_LIST)
OPCODE(TO_BOOL_NONE)
OPCODE(TO_BOOL_STR)
OPCODE(UNPACK_SEQUENCE_LIST)
OPCODE(UNPACK_SEQUENCE_TUPLE)
OPCODE(UNPACK_SEQUENCE_TWO_TUPLE)
OPCODE(ANNOTATIONS_PLACEHOLDER)
OPCODE(JUMP)
OPCODE(JUMP_NO_INTERRUPT)
OPCODE(SETUP_CLEANUP)
OPCODE(STORE_FAST_MAYBE_NULL)
/* Has parameter word */
OPCODE_A_FIRST(STORE_NAME) // Python 1.0 -> names[A]
OPCODE_A(DELETE_NAME) // Python 1.0 -> names[A]
@@ -269,6 +367,7 @@ OPCODE_A(LOAD_FAST_LOAD_FAST) // Python 3.13 -> A=locals
OPCODE_A(SET_FUNCTION_ATTRIBUTE) // Python 3.13 -> A=attribute_type
OPCODE_A(STORE_FAST_LOAD_FAST) // Python 3.13 -> A=locals[A<<4]+locals[A&0xf]
OPCODE_A(STORE_FAST_STORE_FAST) // Python 3.13 -> A=locals[A<<4]+locals[A&0xf]
OPCODE_A(LOAD_SMALL_INT) // Python 3.14 -> A=small int range(256)
/* Instrumented opcodes */
OPCODE_A(INSTRUMENTED_LOAD_SUPER_ATTR) // Python 3.12 -> (see LOAD_SUPER_ATTR)
@@ -290,3 +389,6 @@ OPCODE_A(INSTRUMENTED_END_SEND) // Python 3.12 -> (see END
OPCODE_A(INSTRUMENTED_INSTRUCTION) // Python 3.12 -> A=(unused)
OPCODE_A(INSTRUMENTED_LINE) // Python 3.12 -> ???
OPCODE_A(INSTRUMENTED_CALL_KW) // Python 3.13 -> (see CALL_KW)
OPCODE_A(INSTRUMENTED_POP_ITER) // Python 3.14 -> (see POP_ITER)
OPCODE_A(INSTRUMENTED_NOT_TAKEN) // Python 3.14 -> (see NOT_TAKEN)
OPCODE_A(INSTRUMENTED_END_ASYNC_FOR) // Python 3.14 -> (see END_ASYNC_FOR)

242
pycdc/bytes/python_3_14.cpp Normal file
View File

@@ -0,0 +1,242 @@
#include "bytecode_map.h"

// Byte -> opcode translation table for CPython 3.14 bytecode.
// Each MAP_OP(byte, NAME) maps the on-disk instruction byte to pycdc's
// internal opcode enum (the _A suffix marks opcodes that take an operand
// word). Entry values should be confirmed against CPython 3.14's
// generated opcode numbering (Include/opcode_ids.h) -- TODO verify.
BEGIN_MAP(3, 14)
MAP_OP(0, CACHE)
MAP_OP(1, BINARY_SLICE)
MAP_OP(2, BUILD_TEMPLATE)
MAP_OP(3, BINARY_OP_INPLACE_ADD_UNICODE)
MAP_OP(4, CALL_FUNCTION_EX_A)
MAP_OP(5, CHECK_EG_MATCH)
MAP_OP(6, CHECK_EXC_MATCH)
MAP_OP(7, CLEANUP_THROW)
MAP_OP(8, DELETE_SUBSCR)
MAP_OP(9, END_FOR)
MAP_OP(10, END_SEND)
MAP_OP(11, EXIT_INIT_CHECK)
MAP_OP(12, FORMAT_SIMPLE)
MAP_OP(13, FORMAT_WITH_SPEC)
MAP_OP(14, GET_AITER)
MAP_OP(15, GET_ANEXT)
MAP_OP(16, GET_ITER)
MAP_OP(17, RESERVED)
MAP_OP(18, GET_LEN)
MAP_OP(19, GET_YIELD_FROM_ITER)
MAP_OP(20, INTERPRETER_EXIT)
MAP_OP(21, LOAD_BUILD_CLASS)
MAP_OP(22, LOAD_LOCALS)
MAP_OP(23, MAKE_FUNCTION)
MAP_OP(24, MATCH_KEYS)
MAP_OP(25, MATCH_MAPPING)
MAP_OP(26, MATCH_SEQUENCE)
MAP_OP(27, NOP)
MAP_OP(28, NOT_TAKEN)
MAP_OP(29, POP_EXCEPT)
MAP_OP(30, POP_ITER)
MAP_OP(31, POP_TOP)
MAP_OP(32, PUSH_EXC_INFO)
MAP_OP(33, PUSH_NULL)
MAP_OP(34, RETURN_GENERATOR)
MAP_OP(35, RETURN_VALUE)
MAP_OP(36, SETUP_ANNOTATIONS)
MAP_OP(37, STORE_SLICE)
MAP_OP(38, STORE_SUBSCR)
MAP_OP(39, TO_BOOL)
MAP_OP(40, UNARY_INVERT)
MAP_OP(41, UNARY_NEGATIVE)
MAP_OP(42, UNARY_NOT)
MAP_OP(43, WITH_EXCEPT_START)
MAP_OP(44, BINARY_OP_A)
MAP_OP(45, BUILD_INTERPOLATION)
MAP_OP(46, BUILD_LIST_A)
MAP_OP(47, BUILD_MAP_A)
MAP_OP(48, BUILD_SET_A)
MAP_OP(49, BUILD_SLICE_A)
MAP_OP(50, BUILD_STRING_A)
MAP_OP(51, BUILD_TUPLE_A)
MAP_OP(52, CALL_A)
MAP_OP(53, CALL_INTRINSIC_1_A)
MAP_OP(54, CALL_INTRINSIC_2_A)
MAP_OP(55, CALL_KW_A)
MAP_OP(56, COMPARE_OP_A)
MAP_OP(57, CONTAINS_OP_A)
MAP_OP(58, CONVERT_VALUE_A)
MAP_OP(59, COPY_A)
MAP_OP(60, COPY_FREE_VARS_A)
MAP_OP(61, DELETE_ATTR_A)
MAP_OP(62, DELETE_DEREF_A)
MAP_OP(63, DELETE_FAST_A)
MAP_OP(64, DELETE_GLOBAL_A)
MAP_OP(65, DELETE_NAME_A)
MAP_OP(66, DICT_MERGE_A)
MAP_OP(67, DICT_UPDATE_A)
MAP_OP(68, END_ASYNC_FOR)
MAP_OP(69, EXTENDED_ARG_A)
MAP_OP(70, FOR_ITER_A)
MAP_OP(71, GET_AWAITABLE_A)
MAP_OP(72, IMPORT_FROM_A)
MAP_OP(73, IMPORT_NAME_A)
MAP_OP(74, IS_OP_A)
MAP_OP(75, JUMP_BACKWARD_A)
MAP_OP(76, JUMP_BACKWARD_NO_INTERRUPT_A)
MAP_OP(77, JUMP_FORWARD_A)
MAP_OP(78, LIST_APPEND)
MAP_OP(79, LIST_EXTEND_A)
MAP_OP(80, LOAD_ATTR_A)
MAP_OP(81, LOAD_COMMON_CONSTANT)
MAP_OP(82, LOAD_CONST_A)
MAP_OP(83, LOAD_DEREF_A)
MAP_OP(84, LOAD_FAST_A)
MAP_OP(85, LOAD_FAST_AND_CLEAR_A)
MAP_OP(86, LOAD_FAST_BORROW)
MAP_OP(87, LOAD_FAST_BORROW_LOAD_FAST_BORROW)
MAP_OP(88, LOAD_FAST_CHECK_A)
MAP_OP(89, LOAD_FAST_LOAD_FAST_A)
MAP_OP(90, LOAD_FROM_DICT_OR_DEREF_A)
MAP_OP(91, LOAD_FROM_DICT_OR_GLOBALS_A)
MAP_OP(92, LOAD_GLOBAL_A)
MAP_OP(93, LOAD_NAME_A)
MAP_OP(94, LOAD_SMALL_INT_A)
MAP_OP(95, LOAD_SPECIAL)
MAP_OP(96, LOAD_SUPER_ATTR_A)
MAP_OP(97, MAKE_CELL_A)
MAP_OP(98, MAP_ADD_A)
MAP_OP(99, MATCH_CLASS_A)
MAP_OP(100, POP_JUMP_IF_FALSE_A)
MAP_OP(101, POP_JUMP_IF_NONE_A)
MAP_OP(102, POP_JUMP_IF_NOT_NONE_A)
MAP_OP(103, POP_JUMP_IF_TRUE_A)
MAP_OP(104, RAISE_VARARGS_A)
MAP_OP(105, RERAISE)
MAP_OP(106, SEND_A)
MAP_OP(107, SET_ADD)
MAP_OP(108, SET_FUNCTION_ATTRIBUTE_A)
MAP_OP(109, SET_UPDATE_A)
MAP_OP(110, STORE_ATTR_A)
MAP_OP(111, STORE_DEREF_A)
MAP_OP(112, STORE_FAST_A)
MAP_OP(113, STORE_FAST_LOAD_FAST_A)
MAP_OP(114, STORE_FAST_STORE_FAST_A)
MAP_OP(115, STORE_GLOBAL_A)
MAP_OP(116, STORE_NAME_A)
MAP_OP(117, SWAP_A)
MAP_OP(118, UNPACK_EX_A)
MAP_OP(119, UNPACK_SEQUENCE_A)
MAP_OP(120, YIELD_VALUE_A)
// 121-127: unassigned in this table.
MAP_OP(128, RESUME_A)
// 129-211: specialized variants emitted by the adaptive interpreter;
// mapped here so instrumented/quickened code still disassembles.
MAP_OP(129, BINARY_OP_ADD_FLOAT)
MAP_OP(130, BINARY_OP_ADD_INT)
MAP_OP(131, BINARY_OP_ADD_UNICODE)
MAP_OP(132, BINARY_OP_EXTEND)
MAP_OP(133, BINARY_OP_MULTIPLY_FLOAT)
MAP_OP(134, BINARY_OP_MULTIPLY_INT)
MAP_OP(135, BINARY_OP_SUBSCR_DICT)
MAP_OP(136, BINARY_OP_SUBSCR_GETITEM)
MAP_OP(137, BINARY_OP_SUBSCR_LIST_INT)
MAP_OP(138, BINARY_OP_SUBSCR_LIST_SLICE)
MAP_OP(139, BINARY_OP_SUBSCR_STR_INT)
MAP_OP(140, BINARY_OP_SUBSCR_TUPLE_INT)
MAP_OP(141, BINARY_OP_SUBTRACT_FLOAT)
MAP_OP(142, BINARY_OP_SUBTRACT_INT)
MAP_OP(143, CALL_ALLOC_AND_ENTER_INIT)
MAP_OP(144, CALL_BOUND_METHOD_EXACT_ARGS)
MAP_OP(145, CALL_BOUND_METHOD_GENERAL)
MAP_OP(146, CALL_BUILTIN_CLASS)
MAP_OP(147, CALL_BUILTIN_FAST)
MAP_OP(148, CALL_BUILTIN_FAST_WITH_KEYWORDS)
MAP_OP(149, CALL_BUILTIN_O)
MAP_OP(150, CALL_ISINSTANCE)
MAP_OP(151, CALL_KW_BOUND_METHOD)
MAP_OP(152, CALL_KW_NON_PY)
MAP_OP(153, CALL_KW_PY)
MAP_OP(154, CALL_LEN)
MAP_OP(155, CALL_LIST_APPEND)
MAP_OP(156, CALL_METHOD_DESCRIPTOR_FAST)
MAP_OP(157, CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS)
MAP_OP(158, CALL_METHOD_DESCRIPTOR_NOARGS)
MAP_OP(159, CALL_METHOD_DESCRIPTOR_O)
MAP_OP(160, CALL_NON_PY_GENERAL)
MAP_OP(161, CALL_PY_EXACT_ARGS)
MAP_OP(162, CALL_PY_GENERAL)
MAP_OP(163, CALL_STR_1)
MAP_OP(164, CALL_TUPLE_1)
MAP_OP(165, CALL_TYPE_1)
MAP_OP(166, COMPARE_OP_FLOAT)
MAP_OP(167, COMPARE_OP_INT)
MAP_OP(168, COMPARE_OP_STR)
MAP_OP(169, CONTAINS_OP_DICT)
MAP_OP(170, CONTAINS_OP_SET)
MAP_OP(171, FOR_ITER_GEN)
MAP_OP(172, FOR_ITER_LIST)
MAP_OP(173, FOR_ITER_RANGE)
MAP_OP(174, FOR_ITER_TUPLE)
MAP_OP(175, JUMP_BACKWARD_JIT)
MAP_OP(176, JUMP_BACKWARD_NO_JIT)
MAP_OP(177, LOAD_ATTR_CLASS)
MAP_OP(178, LOAD_ATTR_CLASS_WITH_METACLASS_CHECK)
MAP_OP(179, LOAD_ATTR_GETATTRIBUTE_OVERRIDDEN)
MAP_OP(180, LOAD_ATTR_INSTANCE_VALUE)
MAP_OP(181, LOAD_ATTR_METHOD_LAZY_DICT)
MAP_OP(182, LOAD_ATTR_METHOD_NO_DICT)
MAP_OP(183, LOAD_ATTR_METHOD_WITH_VALUES)
MAP_OP(184, LOAD_ATTR_MODULE)
MAP_OP(185, LOAD_ATTR_NONDESCRIPTOR_NO_DICT)
MAP_OP(186, LOAD_ATTR_NONDESCRIPTOR_WITH_VALUES)
MAP_OP(187, LOAD_ATTR_PROPERTY)
MAP_OP(188, LOAD_ATTR_SLOT)
MAP_OP(189, LOAD_ATTR_WITH_HINT)
MAP_OP(190, LOAD_CONST_IMMORTAL)
MAP_OP(191, LOAD_CONST_MORTAL)
MAP_OP(192, LOAD_GLOBAL_BUILTIN)
MAP_OP(193, LOAD_GLOBAL_MODULE)
MAP_OP(194, LOAD_SUPER_ATTR_ATTR)
MAP_OP(195, LOAD_SUPER_ATTR_METHOD)
MAP_OP(196, RESUME_CHECK)
MAP_OP(197, SEND_GEN)
MAP_OP(198, STORE_ATTR_INSTANCE_VALUE)
MAP_OP(199, STORE_ATTR_SLOT)
MAP_OP(200, STORE_ATTR_WITH_HINT)
MAP_OP(201, STORE_SUBSCR_DICT)
MAP_OP(202, STORE_SUBSCR_LIST_INT)
MAP_OP(203, TO_BOOL_ALWAYS_TRUE)
MAP_OP(204, TO_BOOL_BOOL)
MAP_OP(205, TO_BOOL_INT)
MAP_OP(206, TO_BOOL_LIST)
MAP_OP(207, TO_BOOL_NONE)
MAP_OP(208, TO_BOOL_STR)
MAP_OP(209, UNPACK_SEQUENCE_LIST)
MAP_OP(210, UNPACK_SEQUENCE_TUPLE)
MAP_OP(211, UNPACK_SEQUENCE_TWO_TUPLE)
// 234-255: instrumented opcodes (sys.monitoring) and ENTER_EXECUTOR.
MAP_OP(234, INSTRUMENTED_END_FOR_A)
MAP_OP(235, INSTRUMENTED_POP_ITER_A)
MAP_OP(236, INSTRUMENTED_END_SEND_A)
MAP_OP(237, INSTRUMENTED_FOR_ITER_A)
MAP_OP(238, INSTRUMENTED_INSTRUCTION_A)
MAP_OP(239, INSTRUMENTED_JUMP_FORWARD_A)
MAP_OP(240, INSTRUMENTED_NOT_TAKEN_A)
MAP_OP(241, INSTRUMENTED_POP_JUMP_IF_TRUE_A)
MAP_OP(242, INSTRUMENTED_POP_JUMP_IF_FALSE_A)
MAP_OP(243, INSTRUMENTED_POP_JUMP_IF_NONE_A)
MAP_OP(244, INSTRUMENTED_POP_JUMP_IF_NOT_NONE_A)
MAP_OP(245, INSTRUMENTED_RESUME_A)
MAP_OP(246, INSTRUMENTED_RETURN_VALUE_A)
MAP_OP(247, INSTRUMENTED_YIELD_VALUE_A)
MAP_OP(248, INSTRUMENTED_END_ASYNC_FOR_A)
MAP_OP(249, INSTRUMENTED_LOAD_SUPER_ATTR_A)
MAP_OP(250, INSTRUMENTED_CALL_A)
MAP_OP(251, INSTRUMENTED_CALL_KW_A)
MAP_OP(252, INSTRUMENTED_CALL_FUNCTION_EX_A)
MAP_OP(253, INSTRUMENTED_JUMP_BACKWARD_A)
MAP_OP(254, INSTRUMENTED_LINE_A)
MAP_OP(255, ENTER_EXECUTOR_A)
// 256+: pseudo opcodes. An instruction byte can never exceed 255, so
// these never occur in serialized bytecode; they exist only for the
// compiler/tooling enum space.
MAP_OP(256, ANNOTATIONS_PLACEHOLDER)
MAP_OP(257, JUMP)
MAP_OP(258, JUMP_IF_FALSE_A)
MAP_OP(259, JUMP_IF_TRUE_A)
MAP_OP(260, JUMP_NO_INTERRUPT)
MAP_OP(261, LOAD_CLOSURE_A)
MAP_OP(262, POP_BLOCK)
MAP_OP(263, SETUP_CLEANUP)
MAP_OP(264, SETUP_FINALLY_A)
MAP_OP(265, SETUP_WITH_A)
MAP_OP(266, STORE_FAST_MAYBE_NULL)
END_MAP()

View File

@@ -1,3 +1,19 @@
#include <signal.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <errno.h>
#ifndef _MSC_VER
#include <unistd.h>
#endif
#ifdef _WIN32
#include <windows.h>
#include <io.h>
#include <type_traits>
#endif
/** We call functions from pycdas.cpp directly rather than moving them
 * into a separate file, so that future syncs with upstream remain easy.
 */
@@ -7,12 +23,76 @@
#include "ASTree.h"
const char* VERSION = "v0.2.1";
const char* VERSION = "v0.2.1+";
#ifdef _WIN32
// Windows: Use SEH/UEF; prefer calling only Win32 APIs
#ifdef __cpp_lib_fstream_native_handle
// Raw Win32 handles of the two output files, captured in main() so the
// crash handler can flush them without touching C++ stream objects.
static HANDLE g_dc_h = INVALID_HANDLE_VALUE;
static HANDLE g_das_h = INVALID_HANDLE_VALUE;
#endif
// Unhandled-exception filter: flush partially written output, then die.
// Only Win32 APIs are called here — CRT/stdio state may be corrupted
// when an access violation fires.
static LONG WINAPI av_handler(EXCEPTION_POINTERS* /*ep*/) {
const char msg[] = "Access violation caught. Best-effort FlushFileBuffers.\n";
DWORD wrote = 0;
// WriteFile instead of fprintf: avoids CRT buffering/locks in a crash path.
WriteFile(GetStdHandle(STD_ERROR_HANDLE), msg, sizeof(msg) - 1, &wrote, nullptr);
#ifdef __cpp_lib_fstream_native_handle
if (g_das_h != INVALID_HANDLE_VALUE) FlushFileBuffers(g_das_h);
if (g_dc_h != INVALID_HANDLE_VALUE) FlushFileBuffers(g_dc_h);
#endif
// Exit with STATUS_ACCESS_VIOLATION so callers still see the crash code.
TerminateProcess(GetCurrentProcess(), 0xC0000005);
return EXCEPTION_EXECUTE_HANDLER;
}
// Installs the filter at static-init time, before main() runs.
struct SehInstall {
SehInstall() {
// Suppress WER popups; let the UEF handle it directly
SetErrorMode(SEM_FAILCRITICALERRORS | SEM_NOGPFAULTERRORBOX);
SetUnhandledExceptionFilter(av_handler);
}
} seh_install_guard;
#else // !_WIN32
#ifdef __cpp_lib_fstream_native_handle
// Raw fds of the two output files, captured in main() so the SIGSEGV
// handler can fsync them (fstream itself is not signal-safe).
static int g_dc_fd = -1;
static int g_das_fd = -1;
// SIGSEGV handler: best-effort flush of partial output, then hard exit.
static void segv_handler(int sig) {
const char msg[] = "Access violation caught. Best-effort fsync.\n";
// Only use async-signal-safe functions
// (write/fsync/_Exit are; fprintf and stream flushes are not).
// Return values deliberately ignored: nothing useful to do on failure here.
write(STDERR_FILENO, msg, sizeof(msg)-1);
if (g_das_fd != -1) fsync(g_das_fd);
if (g_dc_fd != -1) fsync(g_dc_fd);
// 128 + signal number mirrors the shell convention for fatal signals.
_Exit(128 + sig);
}
#else
// Fallback when native_handle() is unavailable: report and exit only.
static void segv_handler(int sig) {
const char msg[] = "Access violation caught.\n";
write(STDERR_FILENO, msg, sizeof(msg)-1);
_Exit(128 + sig);
}
#endif
// Installs the handler at static-init time, before main() runs.
struct SegvInstall {
SegvInstall() {
struct sigaction sa{};
sa.sa_handler = segv_handler;
sigemptyset(&sa.sa_mask);
// SA_RESTART: restart interruptible syscalls hit by unrelated signals.
sa.sa_flags = SA_RESTART;
sigaction(SIGSEGV, &sa, nullptr);
}
} segv_install_guard;
#endif // _WIN32
int main(int argc, char* argv[])
{
const char* infile = nullptr;
unsigned disasm_flags = 0;
bool unitbuf = false;
bool banner = true;
std::ofstream dc_out_file;
std::ofstream das_out_file;
@@ -26,13 +106,23 @@ int main(int argc, char* argv[])
fputs("Options:\n", stderr);
fputs(" --pycode-extra Show extra fields in PyCode object dumps\n", stderr);
fputs(" --show-caches Don't suprress CACHE instructions in Python 3.11+ disassembly\n", stderr);
fputs(" --unitbuf Set output streams to be unbuffered\n", stderr);
fputs(" --no-banner Don't output banner\n", stderr);
fputs(" --help Show this help text and then exit\n", stderr);
return 0;
} else if (strcmp(argv[arg], "--unitbuf") == 0) {
unitbuf = true;
} else if (strcmp(argv[arg], "--no-banner") == 0) {
banner = false;
} else if (argv[arg][0] == '-') {
fprintf(stderr, "Error: Unrecognized argument %s\n", argv[arg]);
return 1;
} else {
} else if (!infile) {
infile = argv[arg];
} else {
fprintf(stderr, "Error: Only one input file allowed, got %s and %s\n",
infile, argv[arg]);
return 1;
}
}
@@ -50,17 +140,51 @@ int main(int argc, char* argv[])
}
dc_out_file.open(prefix_name + ".cdc.py", std::ios_base::out);
if (unitbuf) {
dc_out_file.setf(std::ios::unitbuf);
}
if (dc_out_file.fail()) {
fprintf(stderr, "Error opening file '%s' for writing\n", (prefix_name + ".cdc.py").c_str());
return 1;
}
das_out_file.open(prefix_name + ".das", std::ios_base::out);
if (unitbuf) {
das_out_file.setf(std::ios::unitbuf);
}
if (das_out_file.fail()) {
fprintf(stderr, "Error opening file '%s' for writing\n", (prefix_name + ".das").c_str());
return 1;
}
#ifdef __cpp_lib_fstream_native_handle
#ifndef _WIN32
g_dc_fd = dc_out_file.native_handle();
g_das_fd = das_out_file.native_handle();
#else
// Extract underlying handles to flush on exceptions
// MSVC's native_handle is typically a HANDLE; MinGW may return a fd, requiring conversion via _get_osfhandle
auto dc_nh = dc_out_file.native_handle();
auto das_nh = das_out_file.native_handle();
using native_handle_t = decltype(dc_nh);
if constexpr (std::is_same_v<native_handle_t, HANDLE>) {
g_dc_h = dc_nh;
g_das_h = das_nh;
} else if constexpr (std::is_integral_v<native_handle_t>) {
intptr_t dc_handle = _get_osfhandle(dc_nh);
if (dc_handle != -1 && dc_handle != reinterpret_cast<intptr_t>(INVALID_HANDLE_VALUE)) {
g_dc_h = reinterpret_cast<HANDLE>(dc_handle);
}
intptr_t das_handle = _get_osfhandle(das_nh);
if (das_handle != -1 && das_handle != reinterpret_cast<intptr_t>(INVALID_HANDLE_VALUE)) {
g_das_h = reinterpret_cast<HANDLE>(das_handle);
}
} else {
// ignore, keep as INVALID_HANDLE_VALUE
}
#endif
#endif
PycModule mod;
try {
mod.loadFromOneshotSequenceFile(infile);
@@ -79,7 +203,7 @@ int main(int argc, char* argv[])
const char* disp_prefix = strrchr(prefix_name.c_str(), PATHSEP);
disp_prefix = (disp_prefix == NULL) ? prefix_name.c_str() : disp_prefix + 1;
formatted_print(
banner && formatted_print(
das_out_file,
R"(# File: %s (Python %d.%d)
# Disassembly generated by Pyarmor-Static-Unpack-1shot (%s), powered by pycdas
@@ -111,13 +235,15 @@ int main(int argc, char* argv[])
das_out_file);
} catch (std::exception& ex) {
fprintf(stderr, "Error disassembling %s: %s\n", infile, ex.what());
das_out_file.flush();
das_out_file.close();
return 1;
}
das_out_file.flush();
das_out_file.close();
formatted_print(
banner && formatted_print(
dc_out_file,
R"(# File: %s (Python %d.%d)
# Source generated by Pyarmor-Static-Unpack-1shot (%s), powered by Decompyle++ (pycdc)
@@ -136,6 +262,8 @@ int main(int argc, char* argv[])
decompyle(mod.code(), &mod, dc_out_file);
} catch (std::exception& ex) {
fprintf(stderr, "Error decompyling %s: %s\n", infile, ex.what());
dc_out_file.flush();
dc_out_file.close();
return 1;
}

View File

@@ -183,6 +183,12 @@ void PycModule::setVersion(unsigned int magic)
m_unicode = true;
break;
case MAGIC_3_14:
m_maj = 3;
m_min = 14;
m_unicode = true;
break;
/* Bad Magic detected */
default:
m_maj = -1;
@@ -198,7 +204,7 @@ bool PycModule::isSupportedVersion(int major, int minor)
case 2:
return (minor >= 0 && minor <= 7);
case 3:
return (minor >= 0 && minor <= 12);
return (minor >= 0 && minor <= 14);
default:
return false;
}

View File

@@ -36,6 +36,7 @@ enum PycMagic {
MAGIC_3_11 = 0x0A0D0DA7,
MAGIC_3_12 = 0x0A0D0DCB,
MAGIC_3_13 = 0x0A0D0DF3,
MAGIC_3_14 = 0x0A0D0E29,
INVALID = 0,
};