mirror of https://github.com/Ryujinx/Ryujinx.git
synced 2024-10-01 12:30:00 +02:00
infra: Add distribution files for macOS (#3934)
This upstreams the macOS packaging and distribution files.
This commit is contained in:
parent 3fb583c98c
commit 817b89767a
8 changed files with 953 additions and 0 deletions
46  distribution/macos/Info.plist  Normal file
@@ -0,0 +1,46 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <key>CFBundleDevelopmentRegion</key>
    <string>English</string>
    <key>CFBundleExecutable</key>
    <string>Ryujinx</string>
    <key>CFBundleGetInfoString</key>
    <string>Ryujinx</string>
    <key>CFBundleIconFile</key>
    <string>Ryujinx.icns</string>
    <key>CFBundleTypeExtensions</key>
    <array>
        <string>nca</string>
        <string>nro</string>
        <string>nso</string>
        <string>nsp</string>
        <string>xci</string>
    </array>
    <key>CFBundleIdentifier</key>
    <string>org.ryujinx.Ryujinx</string>
    <key>CFBundleInfoDictionaryVersion</key>
    <string>6.0</string>
    <key>CFBundleLongVersionString</key>
    <string>%%RYUJINX_BUILD_VERSION%%-%%RYUJINX_BUILD_GIT_HASH%%</string>
    <key>CFBundleName</key>
    <string>Ryujinx</string>
    <key>CFBundlePackageType</key>
    <string>APPL</string>
    <key>CFBundleShortVersionString</key>
    <string>1.1</string>
    <key>CFBundleSignature</key>
    <string>????</string>
    <key>CFBundleVersion</key>
    <string>1.1.0</string>
    <key>NSHighResolutionCapable</key>
    <true/>
    <key>CSResourcesFileMapped</key>
    <true/>
    <key>NSHumanReadableCopyright</key>
    <string>Copyright © 2018 - 2022 Ryujinx Team and Contributors.</string>
    <key>LSMinimumSystemVersion</key>
    <string>11.0</string>
</dict>
</plist>
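The %%RYUJINX_BUILD_VERSION%% and %%RYUJINX_BUILD_GIT_HASH%% placeholders are substituted by sed in create_macos_release.sh further down. A quick sanity check of the substituted file on a macOS host (bundle path illustrative) would be something like:

    plutil -lint Ryujinx.app/Contents/Info.plist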
BIN  distribution/macos/Ryujinx.icns  Normal file
Binary file not shown.
609  distribution/macos/bundle_fix_up.py  Normal file
@@ -0,0 +1,609 @@
import argparse
import hashlib
import os
from pathlib import Path
import platform
import shutil
import struct
import subprocess
from typing import List, Optional, Tuple

parser = argparse.ArgumentParser(description="Fixup for MacOS application bundle")
parser.add_argument("input_directory", help="Input directory (Application path)")
parser.add_argument("executable_sub_path", help="Main executable sub path")

# Use Apple LLVM on Darwin, otherwise standard LLVM.
if platform.system() == "Darwin":
    OTOOL = "otool"
    INSTALL_NAME_TOOL = "install_name_tool"
else:
    OTOOL = shutil.which("llvm-otool")
    if OTOOL is None:
        for llvm_ver in [15, 14, 13]:
            otool_path = shutil.which(f"llvm-otool-{llvm_ver}")
            if otool_path is not None:
                OTOOL = otool_path
                INSTALL_NAME_TOOL = shutil.which(f"llvm-install-name-tool-{llvm_ver}")
                break
    else:
        INSTALL_NAME_TOOL = shutil.which("llvm-install-name-tool")


args = parser.parse_args()

def get_dylib_id(dylib_path: Path) -> str:
    res = subprocess.check_output([OTOOL, "-D", str(dylib_path.absolute())]).decode(
        "utf-8"
    )

    return res.split("\n")[1]


def get_dylib_dependencies(dylib_path: Path) -> List[str]:
    output = (
        subprocess.check_output([OTOOL, "-L", str(dylib_path.absolute())])
        .decode("utf-8")
        .split("\n")[1:]
    )

    res = []

    for line in output:
        line = line.strip()
        index = line.find(" (compatibility version ")
        if index == -1:
            continue

        line = line[:index]

        res.append(line)

    return res


def replace_dylib_id(dylib_path: Path, new_id: str):
    subprocess.check_call(
        [INSTALL_NAME_TOOL, "-id", new_id, str(dylib_path.absolute())]
    )


def change_dylib_link(dylib_path: Path, old: str, new: str):
    subprocess.check_call(
        [INSTALL_NAME_TOOL, "-change", old, new, str(dylib_path.absolute())]
    )


def add_dylib_rpath(dylib_path: Path, rpath: str):
    subprocess.check_call(
        [INSTALL_NAME_TOOL, "-add_rpath", rpath, str(dylib_path.absolute())]
    )


def fixup_dylib(
    dylib_path: Path,
    replacement_path: str,
    search_path: List[str],
    content_directory: Path,
):
    dylib_id = get_dylib_id(dylib_path)
    new_dylib_id = replacement_path + "/" + os.path.basename(dylib_id)
    replace_dylib_id(dylib_path, new_dylib_id)

    dylib_dependencies = get_dylib_dependencies(dylib_path)
    dylib_new_mapping = {}

    for dylib_dependency in dylib_dependencies:
        if (
            not dylib_dependency.startswith("@executable_path")
            and not dylib_dependency.startswith("/usr/lib")
            and not dylib_dependency.startswith("/System/Library")
        ):
            dylib_dependency_name = os.path.basename(dylib_dependency)
            library_found = False
            for library_base_path in search_path:
                lib_path = Path(os.path.join(library_base_path, dylib_dependency_name))

                if lib_path.exists():
                    target_replacement_path = get_path_related_to_target_exec(
                        content_directory, lib_path
                    )

                    dylib_new_mapping[dylib_dependency] = (
                        target_replacement_path
                        + "/"
                        + os.path.basename(dylib_dependency)
                    )
                    library_found = True

            if not library_found:
                raise Exception(
                    f"{dylib_id}: Cannot find dependency {dylib_dependency_name} for fixup"
                )

    for key in dylib_new_mapping:
        change_dylib_link(dylib_path, key, dylib_new_mapping[key])

FILE_TYPE_ASSEMBLY = 1

ALIGN_REQUIREMENTS = 4096


def parse_embedded_string(data: bytes) -> Tuple[bytes, str]:
    first_byte = data[0]

    if (first_byte & 0x80) == 0:
        size = first_byte
        data = data[1:]
    else:
        second_byte = data[1]

        assert (second_byte & 0x80) == 0

        size = (second_byte << 7) | (first_byte & 0x7F)

        data = data[2:]

    res = data[:size].decode("utf-8")
    data = data[size:]

    return (data, res)


def write_embedded_string(file, string: str):
    raw_str = string.encode("utf-8")
    raw_str_len = len(raw_str)

    assert raw_str_len < 0x7FFF

    if raw_str_len > 0x7F:
        file.write(struct.pack("b", raw_str_len & 0x7F | 0x80))
        file.write(struct.pack("b", raw_str_len >> 7))
    else:
        file.write(struct.pack("b", raw_str_len))

    file.write(raw_str)

class BundleFileEntry(object):
    offset: int
    size: int
    compressed_size: int
    file_type: int
    relative_path: str
    data: bytes

    def __init__(
        self,
        offset: int,
        size: int,
        compressed_size: int,
        file_type: int,
        relative_path: str,
        data: bytes,
    ) -> None:
        self.offset = offset
        self.size = size
        self.compressed_size = compressed_size
        self.file_type = file_type
        self.relative_path = relative_path
        self.data = data

    def write(self, file):
        self.offset = file.tell()

        if (
            self.file_type == FILE_TYPE_ASSEMBLY
            and (self.offset % ALIGN_REQUIREMENTS) != 0
        ):
            padding_size = ALIGN_REQUIREMENTS - (self.offset % ALIGN_REQUIREMENTS)
            file.write(b"\0" * padding_size)
            self.offset += padding_size

        file.write(self.data)

    def write_header(self, file):
        file.write(
            struct.pack(
                "QQQb", self.offset, self.size, self.compressed_size, self.file_type
            )
        )
        write_embedded_string(file, self.relative_path)


class BundleManifest(object):
    major: int
    minor: int
    bundle_id: str
    deps_json: BundleFileEntry
    runtimeconfig_json: BundleFileEntry
    flags: int
    files: List[BundleFileEntry]

    def __init__(
        self,
        major: int,
        minor: int,
        bundle_id: str,
        deps_json: BundleFileEntry,
        runtimeconfig_json: BundleFileEntry,
        flags: int,
        files: List[BundleFileEntry],
    ) -> None:
        self.major = major
        self.minor = minor
        self.bundle_id = bundle_id
        self.deps_json = deps_json
        self.runtimeconfig_json = runtimeconfig_json
        self.flags = flags
        self.files = files

    def write(self, file) -> int:
        for bundle_file in self.files:
            bundle_file.write(file)

        bundle_header_offset = file.tell()
        file.write(struct.pack("iiI", self.major, self.minor, len(self.files)))
        write_embedded_string(file, self.bundle_id)

        if self.deps_json is not None:
            deps_json_location_offset = self.deps_json.offset
            deps_json_location_size = self.deps_json.size
        else:
            deps_json_location_offset = 0
            deps_json_location_size = 0

        if self.runtimeconfig_json is not None:
            runtimeconfig_json_location_offset = self.runtimeconfig_json.offset
            runtimeconfig_json_location_size = self.runtimeconfig_json.size
        else:
            runtimeconfig_json_location_offset = 0
            runtimeconfig_json_location_size = 0

        file.write(
            struct.pack("qq", deps_json_location_offset, deps_json_location_size)
        )
        file.write(
            struct.pack(
                "qq",
                runtimeconfig_json_location_offset,
                runtimeconfig_json_location_size,
            )
        )
        file.write(struct.pack("q", self.flags))

        for bundle_file in self.files:
            bundle_file.write_header(file)

        return bundle_header_offset


def read_file_entry(
    raw_data: bytes, header_bytes: bytes
) -> Tuple[bytes, BundleFileEntry]:
    (
        offset,
        size,
        compressed_size,
        file_type,
    ) = struct.unpack("QQQb", header_bytes[:0x19])
    (header_bytes, relative_path) = parse_embedded_string(header_bytes[0x19:])

    target_size = compressed_size

    if target_size == 0:
        target_size = size

    return (
        header_bytes,
        BundleFileEntry(
            offset,
            size,
            compressed_size,
            file_type,
            relative_path,
            raw_data[offset : offset + target_size],
        ),
    )

def get_dotnet_bundle_data(data: bytes) -> Optional[Tuple[int, int, BundleManifest]]:
    offset = data.find(hashlib.sha256(b".net core bundle\n").digest())

    if offset == -1:
        return None

    raw_header_offset = data[offset - 8 : offset]
    (header_offset,) = struct.unpack("q", raw_header_offset)
    header_bytes = data[header_offset:]

    (
        major,
        minor,
        files_count,
    ) = struct.unpack("iiI", header_bytes[:0xC])
    header_bytes = header_bytes[0xC:]

    (header_bytes, bundle_id) = parse_embedded_string(header_bytes)

    # v2 header
    (
        deps_json_location_offset,
        deps_json_location_size,
    ) = struct.unpack("qq", header_bytes[:0x10])
    (
        runtimeconfig_json_location_offset,
        runtimeconfig_json_location_size,
    ) = struct.unpack("qq", header_bytes[0x10:0x20])
    (flags,) = struct.unpack("q", header_bytes[0x20:0x28])
    header_bytes = header_bytes[0x28:]

    files = []

    deps_json = None
    runtimeconfig_json = None

    for _ in range(files_count):
        (header_bytes, file_entry) = read_file_entry(data, header_bytes)

        files.append(file_entry)

        if file_entry.offset == deps_json_location_offset:
            deps_json = file_entry
        elif file_entry.offset == runtimeconfig_json_location_offset:
            runtimeconfig_json = file_entry

    file_entry = files[0]

    return (
        file_entry.offset,
        header_offset,
        BundleManifest(
            major, minor, bundle_id, deps_json, runtimeconfig_json, flags, files
        ),
    )


LC_SYMTAB = 0x2
LC_SEGMENT_64 = 0x19
LC_CODE_SIGNATURE = 0x1D

def fixup_linkedit(file, data: bytes, new_size: int):
    offset = 0

    (
        macho_magic,
        macho_cputype,
        macho_cpusubtype,
        macho_filetype,
        macho_ncmds,
        macho_sizeofcmds,
        macho_flags,
        macho_reserved,
    ) = struct.unpack("IiiIIIII", data[offset : offset + 0x20])

    offset += 0x20

    linkedit_offset = None
    symtab_offset = None
    codesign_offset = None

    for _ in range(macho_ncmds):
        (cmd, cmdsize) = struct.unpack("II", data[offset : offset + 8])

        if cmd == LC_SEGMENT_64:
            (
                cmd,
                cmdsize,
                segname_raw,
                vmaddr,
                vmsize,
                fileoff,
                filesize,
                maxprot,
                initprot,
                nsects,
                flags,
            ) = struct.unpack("II16sQQQQiiII", data[offset : offset + 72])
            segname = segname_raw.decode("utf-8").split("\0")[0]

            if segname == "__LINKEDIT":
                linkedit_offset = offset
        elif cmd == LC_SYMTAB:
            symtab_offset = offset
        elif cmd == LC_CODE_SIGNATURE:
            codesign_offset = offset

        offset += cmdsize
        pass

    assert linkedit_offset is not None and symtab_offset is not None

    # If there is a codesign section, clean it up.
    if codesign_offset is not None:
        (
            codesign_cmd,
            codesign_cmdsize,
            codesign_dataoff,
            codesign_datasize,
        ) = struct.unpack("IIII", data[codesign_offset : codesign_offset + 16])
        file.seek(codesign_offset)
        file.write(b"\0" * codesign_cmdsize)

        macho_ncmds -= 1
        macho_sizeofcmds -= codesign_cmdsize
        file.seek(0)
        file.write(
            struct.pack(
                "IiiIIIII",
                macho_magic,
                macho_cputype,
                macho_cpusubtype,
                macho_filetype,
                macho_ncmds,
                macho_sizeofcmds,
                macho_flags,
                macho_reserved,
            )
        )

        file.seek(codesign_dataoff)
        file.write(b"\0" * codesign_datasize)

    (
        symtab_cmd,
        symtab_cmdsize,
        symtab_symoff,
        symtab_nsyms,
        symtab_stroff,
        symtab_strsize,
    ) = struct.unpack("IIIIII", data[symtab_offset : symtab_offset + 24])

    symtab_strsize = new_size - symtab_stroff

    new_symtab = struct.pack(
        "IIIIII",
        symtab_cmd,
        symtab_cmdsize,
        symtab_symoff,
        symtab_nsyms,
        symtab_stroff,
        symtab_strsize,
    )

    file.seek(symtab_offset)
    file.write(new_symtab)

    (
        linkedit_cmd,
        linkedit_cmdsize,
        linkedit_segname_raw,
        linkedit_vmaddr,
        linkedit_vmsize,
        linkedit_fileoff,
        linkedit_filesize,
        linkedit_maxprot,
        linkedit_initprot,
        linkedit_nsects,
        linkedit_flags,
    ) = struct.unpack("II16sQQQQiiII", data[linkedit_offset : linkedit_offset + 72])

    linkedit_filesize = new_size - linkedit_fileoff
    linkedit_vmsize = linkedit_filesize

    new_linkedit = struct.pack(
        "II16sQQQQiiII",
        linkedit_cmd,
        linkedit_cmdsize,
        linkedit_segname_raw,
        linkedit_vmaddr,
        linkedit_vmsize,
        linkedit_fileoff,
        linkedit_filesize,
        linkedit_maxprot,
        linkedit_initprot,
        linkedit_nsects,
        linkedit_flags,
    )
    file.seek(linkedit_offset)
    file.write(new_linkedit)

def write_bundle_data(
    output,
    old_bundle_base_offset: int,
    new_bundle_base_offset: int,
    bundle: BundleManifest,
) -> int:
    # Write bundle data
    bundle_header_offset = bundle.write(output)
    total_size = output.tell()

    # Patch the header position
    offset = file_data.find(hashlib.sha256(b".net core bundle\n").digest())
    output.seek(offset - 8)
    output.write(struct.pack("q", bundle_header_offset))

    return total_size - new_bundle_base_offset

input_directory: Path = Path(args.input_directory)
content_directory: Path = Path(os.path.join(args.input_directory, "Contents"))
executable_path: Path = Path(os.path.join(content_directory, args.executable_sub_path))


def get_path_related_to_other_path(a: Path, b: Path) -> str:
    temp = b

    parts = []

    while temp != a:
        temp = temp.parent
        parts.append(temp.name)

    parts.remove(parts[-1])
    parts.reverse()

    return "/".join(parts)


def get_path_related_to_target_exec(input_directory: Path, path: Path):
    return "@executable_path/../" + get_path_related_to_other_path(
        input_directory, path
    )


search_path = [
    Path(os.path.join(content_directory, "Frameworks")),
    Path(os.path.join(content_directory, "Resources/lib")),
]


for path in content_directory.rglob("**/*.dylib"):
    current_search_path = [path.parent]
    current_search_path.extend(search_path)

    fixup_dylib(
        path,
        get_path_related_to_target_exec(content_directory, path),
        current_search_path,
        content_directory,
    )

for path in content_directory.rglob("**/*.so"):
    current_search_path = [path.parent]
    current_search_path.extend(search_path)

    fixup_dylib(
        path,
        get_path_related_to_target_exec(content_directory, path),
        current_search_path,
        content_directory,
    )


with open(executable_path, "rb") as input:
    file_data = input.read()


(bundle_base_offset, bundle_header_offset, bundle) = get_dotnet_bundle_data(file_data)

add_dylib_rpath(executable_path, "@executable_path/../Frameworks/")

# Recent "vanilla" versions of LLVM (LLVM 13 and up) seem to really dislike how .NET packages its assemblies.
# As a result, after running install_name_tool they will have "fixed" the symtab, resulting in a missing .NET bundle...
# To mitigate that, we check whether the bundle offset inside the binary is still valid after install_name_tool and re-add the .NET bundle if it is not.
output_file_size = os.stat(executable_path).st_size
if output_file_size < bundle_header_offset:
    print("LLVM broke the .NET bundle, readding bundle data...")
    with open(executable_path, "r+b") as output:
        file_data = output.read()
        bundle_data_size = write_bundle_data(
            output, bundle_base_offset, output_file_size, bundle
        )

        # Now patch the __LINKEDIT section
        new_size = output_file_size + bundle_data_size
        fixup_linkedit(output, file_data, new_size)
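create_app_bundle.sh below runs this fix-up from inside distribution/macos; a standalone invocation would look roughly like this, with the bundle path purely illustrative:

    python3 bundle_fix_up.py /path/to/Ryujinx.app MacOS/Ryujinx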
95  distribution/macos/construct_universal_dylib.py  Normal file
@@ -0,0 +1,95 @@
import argparse
import os
from pathlib import Path
import platform
import shutil
import subprocess

parser = argparse.ArgumentParser(
    description="Construct Universal dylibs for nuget package"
)
parser.add_argument(
    "arm64_input_directory", help="ARM64 Input directory containing dylibs"
)
parser.add_argument(
    "x86_64_input_directory", help="x86_64 Input directory containing dylibs"
)
parser.add_argument("output_directory", help="Output directory")
parser.add_argument("rglob", help="rglob")

args = parser.parse_args()

# Use Apple LLVM on Darwin, otherwise standard LLVM.
if platform.system() == "Darwin":
    LIPO = "lipo"
else:
    LIPO = shutil.which("llvm-lipo")

    if LIPO is None:
        for llvm_ver in [15, 14, 13]:
            lipo_path = shutil.which(f"llvm-lipo-{llvm_ver}")
            if lipo_path is not None:
                LIPO = lipo_path
                break

if LIPO is None:
    raise Exception("Cannot find a valid location for LLVM lipo!")

arm64_input_directory: Path = Path(args.arm64_input_directory)
x86_64_input_directory: Path = Path(args.x86_64_input_directory)
output_directory: Path = Path(args.output_directory)
rglob = args.rglob


def get_new_name(
    input_directory: Path, output_directory: str, input_dylib_path: Path
) -> Path:
    input_component = str(input_dylib_path).replace(str(input_directory), "")[1:]
    return Path(os.path.join(output_directory, input_component))


def is_fat_file(dylib_path: Path) -> str:
    res = subprocess.check_output([LIPO, "-info", str(dylib_path.absolute())]).decode(
        "utf-8"
    )

    return not res.split("\n")[0].startswith("Non-fat file")


def construct_universal_dylib(
    arm64_input_dylib_path: Path, x86_64_input_dylib_path: Path, output_dylib_path: Path
):
    if output_dylib_path.exists() or output_dylib_path.is_symlink():
        os.remove(output_dylib_path)

    os.makedirs(output_dylib_path.parent, exist_ok=True)

    if arm64_input_dylib_path.is_symlink():
        os.symlink(
            os.path.basename(arm64_input_dylib_path.resolve()), output_dylib_path
        )
    else:
        if is_fat_file(arm64_input_dylib_path) or not x86_64_input_dylib_path.exists():
            with open(output_dylib_path, "wb") as dst:
                with open(arm64_input_dylib_path, "rb") as src:
                    dst.write(src.read())
        else:
            subprocess.check_call(
                [
                    LIPO,
                    str(arm64_input_dylib_path.absolute()),
                    str(x86_64_input_dylib_path.absolute()),
                    "-output",
                    str(output_dylib_path.absolute()),
                    "-create",
                ]
            )


print(rglob)
for path in arm64_input_directory.rglob("**/*.dylib"):
    construct_universal_dylib(
        path,
        get_new_name(arm64_input_directory, x86_64_input_directory, path),
        get_new_name(arm64_input_directory, output_directory, path),
    )
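For reference, create_macos_release.sh below drives this script against the two per-architecture app bundles; an equivalent manual invocation would look roughly like this (paths illustrative):

    python3 construct_universal_dylib.py output_arm64/Ryujinx.app output_x64/Ryujinx.app universal/Ryujinx.app "**/*.dylib"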
51  distribution/macos/create_app_bundle.sh  Executable file
@@ -0,0 +1,51 @@
#!/bin/bash

set -e

PUBLISH_DIRECTORY=$1
OUTPUT_DIRECTORY=$2
ENTITLEMENTS_FILE_PATH=$3

APP_BUNDLE_DIRECTORY=$OUTPUT_DIRECTORY/Ryujinx.app

rm -rf $APP_BUNDLE_DIRECTORY
mkdir -p $APP_BUNDLE_DIRECTORY/Contents
mkdir $APP_BUNDLE_DIRECTORY/Contents/Frameworks
mkdir $APP_BUNDLE_DIRECTORY/Contents/MacOS
mkdir $APP_BUNDLE_DIRECTORY/Contents/Resources

# Copy executables first
cp $PUBLISH_DIRECTORY/Ryujinx.Ava $APP_BUNDLE_DIRECTORY/Contents/MacOS/Ryujinx
chmod u+x $APP_BUNDLE_DIRECTORY/Contents/MacOS/Ryujinx

# Then all libraries
cp $PUBLISH_DIRECTORY/*.dylib $APP_BUNDLE_DIRECTORY/Contents/Frameworks

# Then resources
cp Info.plist $APP_BUNDLE_DIRECTORY/Contents
cp Ryujinx.icns $APP_BUNDLE_DIRECTORY/Contents/Resources/Ryujinx.icns
cp -r $PUBLISH_DIRECTORY/THIRDPARTY.md $APP_BUNDLE_DIRECTORY/Contents/Resources

echo -n "APPL????" > $APP_BUNDLE_DIRECTORY/Contents/PkgInfo

# Fixup libraries and executable
python3 bundle_fix_up.py $APP_BUNDLE_DIRECTORY MacOS/Ryujinx

# Now sign it
if ! [ -x "$(command -v codesign)" ];
then
    if ! [ -x "$(command -v rcodesign)" ];
    then
        echo "Cannot find rcodesign on your system, please install rcodesign."
        exit 1
    fi

    # NOTE: Currently requires https://github.com/indygreg/apple-platform-rs/pull/44 to work on other OSes.
    # cargo install --git "https://github.com/marysaka/apple-platform-rs" --branch "fix/adhoc-app-bundle" apple-codesign --bin "rcodesign"
    echo "Using rcodesign for ad-hoc signing"
    rcodesign sign --entitlements-xml-path $ENTITLEMENTS_FILE_PATH $APP_BUNDLE_DIRECTORY
else
    echo "Using codesign for ad-hoc signing"
    codesign --entitlements $ENTITLEMENTS_FILE_PATH -f --deep -s - $APP_BUNDLE_DIRECTORY
fi
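create_macos_release.sh calls this script once per architecture. It resolves Info.plist, Ryujinx.icns and bundle_fix_up.py relative to the current directory, so it expects to be run from distribution/macos; a manual run would look roughly like this (paths illustrative):

    ./create_app_bundle.sh ../../temp/publish_arm64 ../../temp/output_arm64 ./entitlements.xml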
105  distribution/macos/create_macos_release.sh  Executable file
@@ -0,0 +1,105 @@
#!/bin/bash

set -e

if [ "$#" -ne 6 ]; then
    echo "usage <BASE_DIR> <TEMP_DIRECTORY> <OUTPUT_DIRECTORY> <ENTITLEMENTS_FILE_PATH> <VERSION> <SOURCE_REVISION_ID>"
    exit 1
fi

mkdir -p $1
mkdir -p $2
mkdir -p $3

BASE_DIR=$(readlink -f $1)
TEMP_DIRECTORY=$(readlink -f $2)
OUTPUT_DIRECTORY=$(readlink -f $3)
ENTITLEMENTS_FILE_PATH=$(readlink -f $4)
VERSION=$5
SOURCE_REVISION_ID=$6

RELEASE_TAR_FILE_NAME=Ryujinx-$VERSION-macos_universal.app.tar
ARM64_APP_BUNDLE=$TEMP_DIRECTORY/output_arm64/Ryujinx.app
X64_APP_BUNDLE=$TEMP_DIRECTORY/output_x64/Ryujinx.app
UNIVERSAL_APP_BUNDLE=$OUTPUT_DIRECTORY/Ryujinx.app
EXECUTABLE_SUB_PATH=Contents/MacOS/Ryujinx

rm -rf $TEMP_DIRECTORY
mkdir -p $TEMP_DIRECTORY

DOTNET_COMMON_ARGS="-p:DebugType=embedded -p:Version=$VERSION -p:SourceRevisionId=$SOURCE_REVISION_ID -p:ExtraDefineConstants=DISABLE_UPDATER --self-contained true"

dotnet restore
dotnet build -c Release Ryujinx.Ava
dotnet publish -c Release -r osx-arm64 -o $TEMP_DIRECTORY/publish_arm64 $DOTNET_COMMON_ARGS Ryujinx.Ava
dotnet publish -c Release -r osx-x64 -o $TEMP_DIRECTORY/publish_x64 $DOTNET_COMMON_ARGS Ryujinx.Ava

# Get rid of the support library for ARMeilleure for x64 (that's only for arm64)
rm -rf $TEMP_DIRECTORY/publish_x64/libarmeilleure-jitsupport.dylib

# Get rid of libsoundio from arm64 builds as we don't have an arm64 variant
# TODO: remove this once done
rm -rf $TEMP_DIRECTORY/publish_arm64/libsoundio.dylib

pushd $BASE_DIR/distribution/macos
./create_app_bundle.sh $TEMP_DIRECTORY/publish_x64 $TEMP_DIRECTORY/output_x64 $ENTITLEMENTS_FILE_PATH
./create_app_bundle.sh $TEMP_DIRECTORY/publish_arm64 $TEMP_DIRECTORY/output_arm64 $ENTITLEMENTS_FILE_PATH
popd

rm -rf $UNIVERSAL_APP_BUNDLE
mkdir -p $OUTPUT_DIRECTORY

# Let's copy one of the two app bundles and remove the executable
cp -R $ARM64_APP_BUNDLE $UNIVERSAL_APP_BUNDLE
rm $UNIVERSAL_APP_BUNDLE/$EXECUTABLE_SUB_PATH

# Make its libraries universal
python3 $BASE_DIR/distribution/macos/construct_universal_dylib.py $ARM64_APP_BUNDLE $X64_APP_BUNDLE $UNIVERSAL_APP_BUNDLE "**/*.dylib"

if ! [ -x "$(command -v lipo)" ];
then
    if ! [ -x "$(command -v llvm-lipo-14)" ];
    then
        LIPO=llvm-lipo
    else
        LIPO=llvm-lipo-14
    fi
else
    LIPO=lipo
fi

# Make the executable universal
$LIPO $ARM64_APP_BUNDLE/$EXECUTABLE_SUB_PATH $X64_APP_BUNDLE/$EXECUTABLE_SUB_PATH -output $UNIVERSAL_APP_BUNDLE/$EXECUTABLE_SUB_PATH -create

# Patch up the Info.plist to have the appropriate version
sed -r -i.bck "s/\%\%RYUJINX_BUILD_VERSION\%\%/$VERSION/g;" $UNIVERSAL_APP_BUNDLE/Contents/Info.plist
sed -r -i.bck "s/\%\%RYUJINX_BUILD_GIT_HASH\%\%/$SOURCE_REVISION_ID/g;" $UNIVERSAL_APP_BUNDLE/Contents/Info.plist
rm $UNIVERSAL_APP_BUNDLE/Contents/Info.plist.bck

# Now sign it
if ! [ -x "$(command -v codesign)" ];
then
    if ! [ -x "$(command -v rcodesign)" ];
    then
        echo "Cannot find rcodesign on your system, please install rcodesign."
        exit 1
    fi

    # NOTE: Currently requires https://github.com/indygreg/apple-platform-rs/pull/44 to work on other OSes.
    # cargo install --git "https://github.com/marysaka/apple-platform-rs" --branch "fix/adhoc-app-bundle" apple-codesign --bin "rcodesign"
    echo "Using rcodesign for ad-hoc signing"
    rcodesign sign --entitlements-xml-path $ENTITLEMENTS_FILE_PATH $UNIVERSAL_APP_BUNDLE
else
    echo "Using codesign for ad-hoc signing"
    codesign --entitlements $ENTITLEMENTS_FILE_PATH -f --deep -s - $UNIVERSAL_APP_BUNDLE
fi

echo "Creating archive"
pushd $OUTPUT_DIRECTORY
tar --exclude "Ryujinx.app/Contents/MacOS/Ryujinx" -cvf $RELEASE_TAR_FILE_NAME Ryujinx.app 1> /dev/null
python3 $BASE_DIR/distribution/misc/add_tar_exec.py $RELEASE_TAR_FILE_NAME "Ryujinx.app/Contents/MacOS/Ryujinx" "Ryujinx.app/Contents/MacOS/Ryujinx"
gzip -9 < $RELEASE_TAR_FILE_NAME > $RELEASE_TAR_FILE_NAME.gz
rm $RELEASE_TAR_FILE_NAME
popd

echo "Done"
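Putting it together, a release build from a checkout root would be invoked roughly like this (version and revision values are illustrative; 817b89767a is simply this commit's hash):

    ./distribution/macos/create_macos_release.sh . temp release_output distribution/macos/entitlements.xml 1.1.0 817b89767a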
23  distribution/macos/entitlements.xml  Normal file
@@ -0,0 +1,23 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN"
    "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <key>com.apple.security.cs.allow-jit</key>
    <true/>
    <key>com.apple.security.cs.allow-unsigned-executable-memory</key>
    <true/>
    <key>com.apple.security.cs.allow-dyld-environment-variables</key>
    <true/>
    <key>com.apple.security.cs.disable-library-validation</key>
    <true/>
    <key>com.apple.security.cs.disable-executable-page-protection</key>
    <true/>
    <key>com.apple.security.cs.debugger</key>
    <true/>
    <key>com.apple.security.get-task-allow</key>
    <true/>
    <key>com.apple.security.hypervisor</key>
    <true/>
</dict>
</plist>
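These entitlements (JIT, unsigned executable memory, hypervisor access, and so on) are applied by the ad-hoc signing step in the scripts above. One way to confirm they actually ended up on a signed bundle, assuming macOS's codesign is available:

    codesign -d --entitlements - Ryujinx.app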
24  distribution/misc/add_tar_exec.py  Normal file
@@ -0,0 +1,24 @@
import argparse
from io import BytesIO
import tarfile

parser = argparse.ArgumentParser(
    description="Add the main binary to a tar and force it to be executable"
)
parser.add_argument("input_tar_file", help="input tar file")
parser.add_argument("main_binary_path", help="Main executable path")
parser.add_argument("main_binary_tar_path", help="Main executable tar path")

args = parser.parse_args()
input_tar_file = args.input_tar_file
main_binary_path = args.main_binary_path
main_binary_tar_path = args.main_binary_tar_path

with open(main_binary_path, "rb") as f:
    with tarfile.open(input_tar_file, "a") as tar:
        data = f.read()
        tar_info = tarfile.TarInfo(main_binary_tar_path)
        tar_info.mode = 0o755
        tar_info.size = len(data)

        tar.addfile(tar_info, BytesIO(data))
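create_macos_release.sh above uses this helper to re-insert the main executable that tar was told to exclude, forcing 0755 permissions on it; a standalone invocation would look roughly like this (file names illustrative):

    python3 add_tar_exec.py Ryujinx-1.1.0-macos_universal.app.tar Ryujinx.app/Contents/MacOS/Ryujinx Ryujinx.app/Contents/MacOS/Ryujinx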