Compare commits

..

6 Commits

Author SHA1 Message Date
ciaranbor
56d0786fef Add image lightbox 2026-02-06 21:10:24 +00:00
rltakashige
3b2f553a25 Fix kimi tool calling id (#1413)
## Motivation

Kimi produces its own tool id. It gets confused when we generate our own
id.

## Changes

Add id to tool call item and parse Kimi id properly.

## Test Plan

### Manual Testing
<img width="3198" height="522" alt="image"
src="https://github.com/user-attachments/assets/d71ec2be-7f57-49dc-a569-d304cc430f4d"
/>

Long running Kimi K2.5 cluster querying itself through OpenCode running
on the same Kimi K2.5 instance.
2026-02-06 11:33:51 -08:00
rltakashige
5455a97a8c Fix GLM4Moe Tensor Sharding (#1411)
## Motivation

A recent commit broke GLM (non-Lite) tensor sharding.

## Why It Works

The assert is no longer hit, because the isinstance check now includes
GLM4MoeDecoderLayer.
Added type stubs to keep the type checker happy.

## Test Plan

### Manual Testing
Runs as expected without gibberish.
2026-02-06 16:53:15 +00:00
ciaranbor
6f0cb99919 Ciaran/flux1 kontext (#1394)
## Motivation

Add support for FLUX.1-Kontext-dev, an image editing variant of
FLUX.1-dev

## Changes

- New FluxKontextModelAdapter: Handles Kontext's image-to-image workflow
- encodes input image as conditioning latents with special position IDs,
generates from pure noise
- Model config: 57 transformer blocks (19 joint + 38 single), guidance
scale 4.0, ImageToImage task
- Pipeline updates: Added kontext_image_ids property to PromptData
interface, passed through diffusion runner
  - Model cards: Added TOML configs for base, 4-bit, and 8-bit variants
  - Dependency: mflux 0.15.4 → 0.15.5
- Utility: tmp/quantize_and_upload.py for quantizing and uploading
models to HuggingFace

## Test Plan

### Manual Testing

Works better than Qwen-Image-Edit
2026-02-06 16:20:31 +00:00
ciaranbor
c8d3154f83 More image dimensions (#1395)
## Motivation

More dimensions for image generation

## Changes

- dashboard/src/lib/components/ImageParamsPanel.svelte: Added
"1024x1365" and "1365x1024" to the sizeOptions array
- dashboard/src/lib/stores/app.svelte.ts: Extended the size type in
ImageGenerationParams interface to include the two new dimension options
2026-02-06 15:59:06 +00:00
ciaranbor
63e9cc4fea Ciaran/num sync steps (#1396)
## Motivation

Allow users to directly configure num_sync_steps for distributed image
generation instead of deriving it from a factor of total steps.

## Changes

  - Added num_sync_steps field to AdvancedImageParams API (range 1-50)
- Changed model configs from num_sync_steps_factor: float to
num_sync_steps: int
  - Updated Flux/Qwen configs with direct values (1, 4, 7 respectively)
  - Added slider control in dashboard advanced params panel
  - Falls back to model default when not specified

## Why It Works

Decouples sync steps from inference steps, giving users direct control
over distributed inference synchronization while preserving sensible
defaults.

## Test Plan

### Manual Testing

  - Generate images with various sync step values via dashboard slider
  - Verify default behavior when parameter is unset
2026-02-06 15:51:46 +00:00
61 changed files with 1479 additions and 2374 deletions

3
.gitignore vendored
View File

@@ -35,3 +35,6 @@ hosts_*.json
# bench files
bench/**/*.json
# tmp
tmp/models

View File

@@ -0,0 +1,7 @@
"""
This type stub file was generated by pyright.
"""
from mflux.models.flux.variants.kontext.flux_kontext import Flux1Kontext
__all__ = ["Flux1Kontext"]

View File

@@ -0,0 +1,49 @@
"""
This type stub file was generated by pyright.
"""
from pathlib import Path
from typing import Any
from mlx import nn
from mflux.models.common.config.model_config import ModelConfig
from mflux.models.flux.model.flux_text_encoder.clip_encoder.clip_encoder import (
CLIPEncoder,
)
from mflux.models.flux.model.flux_text_encoder.t5_encoder.t5_encoder import T5Encoder
from mflux.models.flux.model.flux_transformer.transformer import Transformer
from mflux.models.flux.model.flux_vae.vae import VAE
from mflux.utils.generated_image import GeneratedImage
class Flux1Kontext(nn.Module):
    """Type stub for mflux's FLUX.1-Kontext image-editing model (pyright-generated)."""

    # Core model components.
    vae: VAE
    transformer: Transformer
    t5_text_encoder: T5Encoder
    clip_text_encoder: CLIPEncoder
    # Quantization bit width, if the model is quantized.
    bits: int | None
    # Optional LoRA adapter weight paths and their blend scales.
    lora_paths: list[str] | None
    lora_scales: list[float] | None
    # Caches keyed by prompt text; value types are opaque from this stub.
    prompt_cache: dict[str, Any]
    tokenizers: dict[str, Any]
    def __init__(
        self,
        quantize: int | None = ...,
        model_path: str | None = ...,
        lora_paths: list[str] | None = ...,
        lora_scales: list[float] | None = ...,
        model_config: ModelConfig = ...,
    ) -> None: ...
    def generate_image(
        self,
        seed: int,
        prompt: str,
        num_inference_steps: int = ...,
        height: int = ...,
        width: int = ...,
        # Guidance scale for the diffusion process.
        guidance: float = ...,
        # Optional input image for image-to-image editing.
        image_path: Path | str | None = ...,
        image_strength: float | None = ...,
        scheduler: str = ...,
    ) -> GeneratedImage: ...

View File

@@ -0,0 +1,16 @@
"""
This type stub file was generated by pyright.
"""
import mlx.core as mx
from mflux.models.flux.model.flux_vae.vae import VAE
class KontextUtil:
    """Type stub for mflux's Kontext image-conditioning helper (pyright-generated)."""

    # Encodes the image at `image_path` with the VAE. Returns two arrays —
    # presumably (conditioning latents, position IDs); confirm against the
    # FluxKontextModelAdapter that consumes them.
    @staticmethod
    def create_image_conditioning_latents(
        vae: VAE,
        height: int,
        width: int,
        image_path: str,
    ) -> tuple[mx.array, mx.array]: ...

View File

@@ -0,0 +1,153 @@
from dataclasses import dataclass
from typing import Any, Dict, Optional
import mlx.core as mx
import mlx.nn as nn
from .base import BaseModelArgs
from .switch_layers import SwitchGLU
@dataclass
class ModelArgs(BaseModelArgs):
    """Configuration stub for the GLM-4 MoE language model (pyright-generated)."""

    model_type: str
    # Embedding / transformer dimensions.
    vocab_size: int
    hidden_size: int
    intermediate_size: int
    max_position_embeddings: int
    # Mixture-of-experts configuration.
    moe_intermediate_size: int
    norm_topk_prob: bool
    num_attention_heads: int
    n_group: int
    head_dim: int
    topk_group: int
    n_shared_experts: int
    n_routed_experts: int
    routed_scaling_factor: float
    num_experts_per_tok: int
    # Presumably: layers before this index use a dense MLP instead of MoE
    # (see DecoderLayer.mlp) — confirm in the implementation.
    first_k_dense_replace: int
    num_hidden_layers: int
    num_key_value_heads: int
    rms_norm_eps: float
    # Rotary position-embedding configuration.
    rope_theta: float
    rope_scaling: Optional[Dict[str, Any]]
    use_qk_norm: bool
    tie_word_embeddings: bool
    attention_bias: bool
    partial_rotary_factor: float
    scoring_func: str
    topk_method: str
class Attention(nn.Module):
    """Attention block stub: q/k/v/o linear projections, optional QK RMSNorm, RoPE."""

    n_heads: int
    # KV head count; may differ from n_heads.
    n_kv_heads: int
    scale: float
    q_proj: nn.Linear
    k_proj: nn.Linear
    v_proj: nn.Linear
    o_proj: nn.Linear
    # Whether QK RMSNorm is enabled (mirrors ModelArgs.use_qk_norm).
    use_qk_norm: bool
    q_norm: nn.RMSNorm
    k_norm: nn.RMSNorm
    rope: nn.RoPE
    def __init__(self, args: ModelArgs) -> None: ...
    def __call__(
        self,
        x: mx.array,
        mask: Optional[mx.array] = None,
        # Opaque KV-cache object; its type is not visible from this stub.
        cache: Optional[Any] = None,
    ) -> mx.array: ...
class MLP(nn.Module):
    """Dense feed-forward block stub (gate/up/down projections)."""

    config: ModelArgs
    hidden_size: int
    intermediate_size: int
    gate_proj: nn.Linear
    up_proj: nn.Linear
    down_proj: nn.Linear
    def __init__(
        self,
        config: ModelArgs,
        # Optional overrides of the config dimensions — presumably used for the
        # shared-expert MLP inside MoE; confirm in the implementation.
        hidden_size: Optional[int] = None,
        intermediate_size: Optional[int] = None,
    ) -> None: ...
    def __call__(self, x: mx.array) -> mx.array: ...
class MoEGate(nn.Module):
    """Expert-routing gate stub for the MoE block."""

    config: ModelArgs
    top_k: int
    norm_topk_prob: bool
    n_routed_experts: int
    routed_scaling_factor: float
    n_group: int
    topk_group: int
    # Gate parameters (routing weight matrix and score-correction bias).
    weight: mx.array
    e_score_correction_bias: mx.array
    def __init__(self, config: ModelArgs) -> None: ...
    # Returns a pair of arrays — presumably (selected expert indices, routing
    # weights); the exact order is not visible from this stub.
    def __call__(self, x: mx.array) -> tuple[mx.array, mx.array]: ...
class MoE(nn.Module):
    """Mixture-of-experts block stub: routed experts plus shared experts."""

    config: ModelArgs
    num_experts_per_tok: int
    # Routed expert MLPs (fused SwitchGLU implementation).
    switch_mlp: SwitchGLU
    gate: MoEGate
    # Dense MLP applied alongside the routed experts.
    shared_experts: MLP
    # Distributed group used for tensor sharding, when sharded.
    sharding_group: Optional[mx.distributed.Group]
    def __init__(self, config: ModelArgs) -> None: ...
    def __call__(self, x: mx.array) -> mx.array: ...
class DecoderLayer(nn.Module):
    """Single transformer layer stub: self-attention plus dense MLP or MoE."""

    self_attn: Attention
    # Dense MLP or MoE — presumably chosen from layer_idx against
    # ModelArgs.first_k_dense_replace; confirm in the implementation.
    mlp: MLP | MoE
    input_layernorm: nn.RMSNorm
    post_attention_layernorm: nn.RMSNorm
    def __init__(self, config: ModelArgs, layer_idx: int) -> None: ...
    def __call__(
        self,
        x: mx.array,
        mask: Optional[mx.array] = None,
        cache: Optional[Any] = None,
    ) -> mx.array: ...
class LanguageModel(nn.Module):
    """Decoder-stack stub with pipeline-parallel bookkeeping."""

    vocab_size: int
    embed_tokens: nn.Embedding
    layers: list[DecoderLayer]
    norm: nn.RMSNorm
    # Pipeline-parallel state: this rank's position and size, and the slice of
    # layers it owns — presumably [start_idx, end_idx); exact semantics live in
    # the implementation.
    pipeline_rank: int
    pipeline_size: int
    start_idx: int
    end_idx: Optional[int]
    num_layers: int
    def __init__(self, config: ModelArgs) -> None: ...
    def __call__(
        self,
        x: mx.array,
        cache: Optional[Any] = None,
    ) -> mx.array: ...
    # The subset of layers assigned to this pipeline rank.
    @property
    def pipeline_layers(self) -> list[DecoderLayer]: ...
class Model(nn.Module):
    """Top-level model stub: LanguageModel plus LM head and sharding hooks."""

    args: ModelArgs
    model_type: str
    model: LanguageModel
    lm_head: nn.Linear
    def __init__(self, config: ModelArgs) -> None: ...
    def __call__(
        self,
        inputs: mx.array,
        cache: Optional[Any] = None,
    ) -> mx.array: ...
    # Presumably remaps/filters checkpoint weights before loading (the usual
    # mlx-lm convention) — confirm in the implementation.
    def sanitize(self, weights: dict[str, Any]) -> dict[str, Any]: ...
    # Shards the model's tensors across the given distributed group.
    def shard(self, group: Optional[mx.distributed.Group] = None) -> None: ...
    @property
    def layers(self) -> list[DecoderLayer]: ...
    # Predicate controlling which weights are cast; exact type is opaque here.
    @property
    def cast_predicate(self) -> Any: ...

View File

@@ -13,6 +13,7 @@
import type { MessageAttachment } from "$lib/stores/app.svelte";
import MarkdownContent from "./MarkdownContent.svelte";
import TokenHeatmap from "./TokenHeatmap.svelte";
import ImageLightbox from "./ImageLightbox.svelte";
interface Props {
class?: string;
@@ -101,6 +102,9 @@
let copiedMessageId = $state<string | null>(null);
let expandedThinkingMessageIds = $state<Set<string>>(new Set());
// Lightbox state
let expandedImageSrc = $state<string | null>(null);
// Uncertainty heatmap toggle
let heatmapMessageIds = $state<Set<string>>(new Set());
@@ -389,10 +393,15 @@
class="flex items-center gap-2 bg-exo-dark-gray/60 border border-exo-yellow/20 rounded px-2 py-1 text-xs font-mono"
>
{#if attachment.type === "image" && attachment.preview}
<!-- svelte-ignore a11y_no_noninteractive_element_interactions, a11y_click_events_have_key_events -->
<img
src={attachment.preview}
alt={attachment.name}
class="w-12 h-12 object-cover rounded border border-exo-yellow/20"
class="w-12 h-12 object-cover rounded border border-exo-yellow/20 cursor-pointer hover:border-exo-yellow/50 transition-colors"
onclick={() => {
if (attachment.preview)
expandedImageSrc = attachment.preview;
}}
/>
{:else}
<span>{getAttachmentIcon(attachment)}</span>
@@ -466,15 +475,44 @@
<div class="mb-3">
{#each message.attachments.filter((a) => a.type === "generated-image") as attachment}
<div class="relative group/img inline-block">
<!-- svelte-ignore a11y_no_noninteractive_element_interactions, a11y_click_events_have_key_events -->
<img
src={attachment.preview}
alt=""
class="max-w-full max-h-[512px] rounded-lg border border-exo-yellow/20 shadow-lg shadow-black/20"
class="max-w-full max-h-[512px] rounded-lg border border-exo-yellow/20 shadow-lg shadow-black/20 cursor-pointer"
onclick={() => {
if (attachment.preview)
expandedImageSrc = attachment.preview;
}}
/>
<!-- Button overlay -->
<div
class="absolute top-2 right-2 flex gap-1 opacity-0 group-hover/img:opacity-100 transition-opacity"
>
<!-- Expand button -->
<button
type="button"
class="p-2 rounded-lg bg-exo-dark-gray/80 border border-exo-yellow/30 text-exo-yellow hover:bg-exo-dark-gray hover:border-exo-yellow/50 cursor-pointer"
onclick={() => {
if (attachment.preview)
expandedImageSrc = attachment.preview;
}}
title="Expand image"
>
<svg
class="w-4 h-4"
fill="none"
viewBox="0 0 24 24"
stroke="currentColor"
stroke-width="2"
>
<path
stroke-linecap="round"
stroke-linejoin="round"
d="M4 8V4m0 0h4M4 4l5 5m11-1V4m0 0h-4m4 0l-5 5M4 16v4m0 0h4m-4 0l5-5m11 5l-5-5m5 5v-4m0 4h-4"
/>
</svg>
</button>
<!-- Edit button -->
<button
type="button"
@@ -789,3 +827,8 @@
</button>
{/if}
</div>
<ImageLightbox
src={expandedImageSrc}
onclose={() => (expandedImageSrc = null)}
/>

View File

@@ -0,0 +1,96 @@
<script lang="ts">
import { fade, fly } from "svelte/transition";
import { cubicOut } from "svelte/easing";
interface Props {
src: string | null;
onclose: () => void;
}
let { src, onclose }: Props = $props();
// Close the lightbox when the user presses Escape.
function handleKeydown(e: KeyboardEvent) {
  if (e.key === "Escape") {
    onclose();
  }
}
// Derive a file extension for the download name: prefer the MIME subtype of a
// data: URL (normalizing "jpeg" to "jpg"), then a trailing ".ext" in a plain
// URL, and fall back to "png".
function extensionFromSrc(dataSrc: string): string {
  const dataUrlMatch = /^data:image\/(\w+)/.exec(dataSrc);
  if (dataUrlMatch) {
    const subtype = dataUrlMatch[1];
    return subtype === "jpeg" ? "jpg" : subtype;
  }
  const fileExtMatch = /\.(\w+)(?:\?|$)/.exec(dataSrc);
  return fileExtMatch ? fileExtMatch[1] : "png";
}
// Download the current image by clicking a synthetic <a download> link.
// stopPropagation keeps the click from reaching the backdrop, whose own
// click handler would close the lightbox.
function handleDownload(e: MouseEvent) {
  e.stopPropagation();
  if (!src) return;
  const link = document.createElement("a");
  link.href = src;
  link.download = `image-${Date.now()}.${extensionFromSrc(src)}`;
  link.click();
}
// Close via the close button; stopPropagation avoids the backdrop's own
// click handler firing onclose a second time.
function handleClose(e: MouseEvent) {
  e.stopPropagation();
  onclose();
}
</script>
<svelte:window onkeydown={src ? handleKeydown : undefined} />
{#if src}
<div
class="fixed inset-0 z-50 bg-black/90 backdrop-blur-sm flex items-center justify-center"
transition:fade={{ duration: 200 }}
onclick={onclose}
role="presentation"
onintrostart={() => (document.body.style.overflow = "hidden")}
onoutroend={() => (document.body.style.overflow = "")}
>
<div class="absolute top-4 right-4 flex gap-2 z-10">
<button
type="button"
class="p-2 rounded-lg bg-exo-dark-gray/80 border border-exo-yellow/30 text-exo-yellow hover:bg-exo-dark-gray hover:border-exo-yellow/50 cursor-pointer transition-colors"
onclick={handleDownload}
title="Download image"
>
<svg
class="w-5 h-5"
fill="none"
viewBox="0 0 24 24"
stroke="currentColor"
stroke-width="2"
>
<path
stroke-linecap="round"
stroke-linejoin="round"
d="M4 16v1a3 3 0 003 3h10a3 3 0 003-3v-1m-4-4l-4 4m0 0l-4-4m4 4V4"
/>
</svg>
</button>
<button
type="button"
class="p-2 rounded-lg bg-exo-dark-gray/80 border border-exo-yellow/30 text-exo-yellow hover:bg-exo-dark-gray hover:border-exo-yellow/50 cursor-pointer transition-colors"
onclick={handleClose}
title="Close"
>
<svg class="w-5 h-5" viewBox="0 0 24 24" fill="currentColor">
<path
d="M19 6.41L17.59 5 12 10.59 6.41 5 5 6.41 10.59 12 5 17.59 6.41 19 12 13.41 17.59 19 19 17.59 13.41 12 19 6.41z"
/>
</svg>
</button>
</div>
<!-- svelte-ignore a11y_no_noninteractive_element_interactions, a11y_click_events_have_key_events -->
<img
{src}
alt=""
class="max-w-[90vw] max-h-[90vh] object-contain rounded-lg shadow-2xl"
transition:fly={{ y: 20, duration: 300, easing: cubicOut }}
onclick={(e) => e.stopPropagation()}
/>
</div>
{/if}

View File

@@ -64,6 +64,8 @@
"1024x1024",
"1024x768",
"768x1024",
"1024x1365",
"1365x1024",
];
const qualityOptions: ImageGenerationParams["quality"][] = [
@@ -148,6 +150,15 @@
setImageGenerationParams({ guidance: null });
}
function handleNumSyncStepsChange(event: Event) {
const value = parseInt((event.target as HTMLInputElement).value, 10);
setImageGenerationParams({ numSyncSteps: value });
}
function clearNumSyncSteps() {
setImageGenerationParams({ numSyncSteps: null });
}
function handleReset() {
resetImageGenerationParams();
showAdvanced = false;
@@ -157,7 +168,8 @@
params.seed !== null ||
params.numInferenceSteps !== null ||
params.guidance !== null ||
(params.negativePrompt !== null && params.negativePrompt.trim() !== ""),
(params.negativePrompt !== null && params.negativePrompt.trim() !== "") ||
params.numSyncSteps !== null,
);
</script>
@@ -578,7 +590,50 @@
</div>
</div>
<!-- Row 3: Negative Prompt -->
<!-- Row 3: Sync Steps -->
<div class="flex items-center gap-1.5">
<span
class="text-xs text-exo-light-gray uppercase tracking-wider whitespace-nowrap"
>SYNC STEPS:</span
>
<div class="flex items-center gap-2 flex-1 max-w-xs">
<input
type="range"
min="1"
max="100"
value={params.numSyncSteps ?? 1}
oninput={handleNumSyncStepsChange}
class="flex-1 h-1 bg-exo-medium-gray/50 rounded appearance-none cursor-pointer accent-exo-yellow"
/>
<span class="text-xs font-mono text-exo-yellow w-8 text-right">
{params.numSyncSteps ?? "--"}
</span>
{#if params.numSyncSteps !== null}
<button
type="button"
onclick={clearNumSyncSteps}
class="text-exo-light-gray hover:text-exo-yellow transition-colors"
title="Clear"
>
<svg
class="w-3 h-3"
fill="none"
viewBox="0 0 24 24"
stroke="currentColor"
>
<path
stroke-linecap="round"
stroke-linejoin="round"
stroke-width="2"
d="M6 18L18 6M6 6l12 12"
/>
</svg>
</button>
{/if}
</div>
</div>
<!-- Row 4: Negative Prompt -->
<div class="flex flex-col gap-1.5">
<span class="text-xs text-exo-light-gray uppercase tracking-wider"
>NEGATIVE PROMPT:</span

View File

@@ -286,7 +286,14 @@ const IMAGE_PARAMS_STORAGE_KEY = "exo-image-generation-params";
// Image generation params interface matching backend API
export interface ImageGenerationParams {
// Basic params
size: "512x512" | "768x768" | "1024x1024" | "1024x768" | "768x1024";
size:
| "512x512"
| "768x768"
| "1024x1024"
| "1024x768"
| "768x1024"
| "1024x1365"
| "1365x1024";
quality: "low" | "medium" | "high";
outputFormat: "png" | "jpeg";
numImages: number;
@@ -298,6 +305,7 @@ export interface ImageGenerationParams {
numInferenceSteps: number | null;
guidance: number | null;
negativePrompt: string | null;
numSyncSteps: number | null;
// Edit mode params
inputFidelity: "low" | "high";
}
@@ -319,6 +327,7 @@ const DEFAULT_IMAGE_PARAMS: ImageGenerationParams = {
numInferenceSteps: null,
guidance: null,
negativePrompt: null,
numSyncSteps: null,
inputFidelity: "low",
};
@@ -2396,7 +2405,9 @@ class AppStore {
params.seed !== null ||
params.numInferenceSteps !== null ||
params.guidance !== null ||
(params.negativePrompt !== null && params.negativePrompt.trim() !== "");
(params.negativePrompt !== null &&
params.negativePrompt.trim() !== "") ||
params.numSyncSteps !== null;
const requestBody: Record<string, unknown> = {
model,
@@ -2421,6 +2432,9 @@ class AppStore {
params.negativePrompt.trim() !== "" && {
negative_prompt: params.negativePrompt,
}),
...(params.numSyncSteps !== null && {
num_sync_steps: params.numSyncSteps,
}),
};
}
@@ -2670,29 +2684,19 @@ class AppStore {
formData.append("input_fidelity", params.inputFidelity);
// Advanced params
if (params.seed !== null) {
formData.append(
"advanced_params",
JSON.stringify({
seed: params.seed,
...(params.numInferenceSteps !== null && {
num_inference_steps: params.numInferenceSteps,
}),
...(params.guidance !== null && { guidance: params.guidance }),
...(params.negativePrompt !== null &&
params.negativePrompt.trim() !== "" && {
negative_prompt: params.negativePrompt,
}),
}),
);
} else if (
const hasAdvancedParams =
params.seed !== null ||
params.numInferenceSteps !== null ||
params.guidance !== null ||
(params.negativePrompt !== null && params.negativePrompt.trim() !== "")
) {
(params.negativePrompt !== null &&
params.negativePrompt.trim() !== "") ||
params.numSyncSteps !== null;
if (hasAdvancedParams) {
formData.append(
"advanced_params",
JSON.stringify({
...(params.seed !== null && { seed: params.seed }),
...(params.numInferenceSteps !== null && {
num_inference_steps: params.numInferenceSteps,
}),
@@ -2701,6 +2705,9 @@ class AppStore {
params.negativePrompt.trim() !== "" && {
negative_prompt: params.negativePrompt,
}),
...(params.numSyncSteps !== null && {
num_sync_steps: params.numSyncSteps,
}),
}),
);
}

View File

@@ -83,9 +83,6 @@
_module.args.pkgs = import inputs.nixpkgs {
inherit system;
config.allowUnfreePredicate = pkg: (pkg.pname or "") == "metal-toolchain";
overlays = [
(final: _: { apple-sdk_26 = final.callPackage ./nix/apple-sdk/package.nix { darwinSdkMajorVersion = "26"; }; })
];
};
treefmt = {
projectRootFile = "flake.nix";
@@ -108,10 +105,7 @@
enable = true;
package = pkgsSwift.swiftPackages.swift-format;
};
shfmt = {
enable = true;
excludes = [ "nix/apple-sdk/**" ];
};
shfmt.enable = true;
};
};
@@ -128,11 +122,6 @@
inherit uvLockMlxVersion;
};
default = self'.packages.exo;
sdk-version = pkgs.runCommand "sdk-version" { } ''
mkdir -p $out
echo ${pkgs.apple-sdk_26.version} > $out/version
'';
}
);

View File

View File

@@ -1,48 +0,0 @@
{ lib
, fetchFromGitHub
, stdenvNoCC
,
}:
let
CoreSymbolication = stdenvNoCC.mkDerivation (finalAttrs: {
pname = "CoreSymbolication";
version = "0-unstable-2018-06-17";
src = fetchFromGitHub {
repo = "CoreSymbolication";
owner = "matthewbauer";
rev = "24c87c23664b3ee05dc7a5a87d647ae476a680e4";
hash = "sha256-PzvLq94eNhP0+rLwGMKcMzxuD6MlrNI7iT/eV0obtSE=";
};
patches = [
# Add missing symbol definitions needed to build `zlog` in system_cmds.
# https://github.com/matthewbauer/CoreSymbolication/pull/2
../patches/0001-Add-function-definitions-needed-to-build-zlog-in-sys.patch
../patches/0002-Add-CF_EXPORT-To-const-symbols.patch
];
dontBuild = true;
installPhase = ''
mkdir -p "$out/include"
cp *.h "$out/include"
'';
meta = {
description = "Reverse engineered headers for Apple's CoreSymbolication framework";
homepage = "https://github.com/matthewbauer/CoreSymbolication";
license = lib.licenses.mit;
teams = [ lib.teams.darwin ];
platforms = lib.platforms.darwin;
};
});
in
self: super: {
buildPhase = super.buildPhase or "" + ''
mkdir -p System/Library/PrivateFrameworks/CoreSymbolication.framework/Versions/A/Headers
ln -s Versions/Current/Headers System/Library/PrivateFrameworks/CoreSymbolication.framework/Headers
cp '${CoreSymbolication}/include/'*.h System/Library/PrivateFrameworks/CoreSymbolication.framework/Versions/A/Headers
'';
}

View File

@@ -1,13 +0,0 @@
{ lib, config }:
self: super: {
preBuild = super.preBuild or "" + ''
platformPath=$out/Platforms/MacOSX.platform
sdkpath=$platformPath/Developer/SDKs
'';
preInstall = super.preInstall or "" + ''
platformPath=$out/Platforms/MacOSX.platform
sdkpath=$platformPath/Developer/SDKs
'';
}

View File

@@ -1,38 +0,0 @@
{ lib
, fetchurl
, cpio
, pbzx
,
}:
{ urls
, version
, hash
,
}:
fetchurl {
pname = "macOS-SDK";
inherit version urls hash;
recursiveHash = true;
nativeBuildInputs = [
cpio
pbzx
];
postFetch = ''
renamed=$(mktemp -d)/sdk.xar
mv "$downloadedFile" "$renamed"
pbzx "$renamed" | cpio -idm
src=Library/Developer/CommandLineTools/SDKs/MacOSX${lib.versions.majorMinor version}.sdk
# Remove unwanted binaries, man pages, and folders from the SDK.
rm -rf $src/usr/bin $src/usr/share $src/System/Library/Perl
mkdir -p "$out"
cp -rd $src/* "$out"
'';
}

View File

@@ -1,10 +0,0 @@
{ makeSetupHook, sdkVersion }:
self: super: {
passthru = super.passthru or { } // {
privateFrameworksHook = makeSetupHook
{
name = "apple-sdk-private-frameworks-hook";
} ../setup-hooks/add-private-frameworks.sh;
};
}

View File

@@ -1,38 +0,0 @@
let
lockfile = builtins.fromJSON (builtins.readFile ../metadata/apple-oss-lockfile.json);
in
{ lib
, fetchFromGitHub
, stdenvNoCC
, sdkVersion
,
}:
let
sdkinfo = lockfile.${sdkVersion};
in
self: super: {
passthru = super.passthru or { } // {
# Returns the raw source from apple-oss-distributions repo.
# This is mostly useful for copying private headers needed to build other source releases.
#
# Note: The source releases are mostly not used to build the SDK. Unless they can be used to build binaries,
# they're not used.
sourceRelease =
name:
let
lockinfo = sdkinfo.${name};
in
fetchFromGitHub
{
owner = "apple-oss-distributions";
repo = name;
rev = lockinfo.rev or "${name}-${lockinfo.version}";
inherit (lockinfo) hash;
}
// {
inherit (lockinfo) version;
};
};
}

View File

@@ -1,327 +0,0 @@
{ lib
, stdenvNoCC
, xcodePlatform
, sdkVersion
,
}:
let
inherit (lib.generators) toPlist;
Info = rec {
CFBundleIdentifier = "com.apple.platform.${Name}";
DefaultProperties = {
COMPRESS_PNG_FILES = "NO";
DEPLOYMENT_TARGET_SETTING_NAME = stdenvNoCC.hostPlatform.darwinMinVersionVariable;
STRIP_PNG_TEXT = "NO";
};
Description = if stdenvNoCC.hostPlatform.isMacOS then "macOS" else "iOS";
FamilyIdentifier = lib.toLower xcodePlatform;
FamilyName = Description;
Identifier = CFBundleIdentifier;
MinimumSDKVersion = stdenvNoCC.hostPlatform.darwinMinVersion;
Name = lib.toLower xcodePlatform;
Type = "Platform";
Version = sdkVersion;
};
# These files are all based off of Xcode spec files found in
# /Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/Library/Xcode/PrivatePlugIns/IDEOSXSupportCore.ideplugin/Contents/Resources.
# Based off of the "MacOSX Architectures.xcspec" file. All i386 stuff
# is removed because NixPkgs only supports darwin-x86_64 and darwin-arm64.
Architectures = [
{
Identifier = "Standard";
Type = "Architecture";
Name = "Standard Architectures (Apple Silicon, 64-bit Intel)";
RealArchitectures = [
"arm64"
"x86_64"
];
ArchitectureSetting = "ARCHS_STANDARD";
}
{
Identifier = "Universal";
Type = "Architecture";
Name = "Universal (Apple Silicon, 64-bit Intel)";
RealArchitectures = [
"arm64"
"x86_64"
];
ArchitectureSetting = "ARCHS_STANDARD_32_64_BIT";
}
{
Identifier = "Native";
Type = "Architecture";
Name = "Native Architecture of Build Machine";
ArchitectureSetting = "NATIVE_ARCH_ACTUAL";
}
{
Identifier = "Standard64bit";
Type = "Architecture";
Name = "Apple Silicon, 64-bit Intel";
RealArchitectures = [
"arm64"
"x86_64"
];
ArchitectureSetting = "ARCHS_STANDARD_64_BIT";
}
{
Identifier = stdenvNoCC.hostPlatform.darwinArch;
Type = "Architecture";
Name = "Apple Silicon or Intel 64-bit";
}
{
Identifier = "Standard_Including_64_bit";
Type = "Architecture";
Name = "Standard Architectures (including 64-bit)";
RealArchitectures = [
"arm64"
"x86_64"
];
ArchitectureSetting = "ARCHS_STANDARD_INCLUDING_64_BIT";
}
];
# Based off of the "MacOSX Package Types.xcspec" file. Only keep the
# bare minimum needed.
PackageTypes = [
{
Identifier = "com.apple.package-type.mach-o-executable";
Type = "PackageType";
Name = "Mach-O Executable";
DefaultBuildSettings = {
EXECUTABLE_NAME = "$(EXECUTABLE_PREFIX)$(PRODUCT_NAME)$(EXECUTABLE_VARIANT_SUFFIX)$(EXECUTABLE_SUFFIX)";
EXECUTABLE_PATH = "$(EXECUTABLE_NAME)";
};
ProductReference = {
FileType = "compiled.mach-o.executable";
Name = "$(EXECUTABLE_NAME)";
};
}
{
Identifier = "com.apple.package-type.mach-o-objfile";
Type = "PackageType";
Name = "Mach-O Object File";
DefaultBuildSettings = {
EXECUTABLE_NAME = "$(EXECUTABLE_PREFIX)$(PRODUCT_NAME)$(EXECUTABLE_VARIANT_SUFFIX)$(EXECUTABLE_SUFFIX)";
EXECUTABLE_PATH = "$(EXECUTABLE_NAME)";
};
ProductReference = {
FileType = "compiled.mach-o.objfile";
Name = "$(EXECUTABLE_NAME)";
};
}
{
Identifier = "com.apple.package-type.mach-o-dylib";
Type = "PackageType";
Name = "Mach-O Dynamic Library";
DefaultBuildSettings = {
EXECUTABLE_NAME = "$(EXECUTABLE_PREFIX)$(PRODUCT_NAME)$(EXECUTABLE_VARIANT_SUFFIX)$(EXECUTABLE_SUFFIX)";
EXECUTABLE_PATH = "$(EXECUTABLE_NAME)";
};
ProductReference = {
FileType = "compiled.mach-o.dylib";
Name = "$(EXECUTABLE_NAME)";
};
}
{
Identifier = "com.apple.package-type.static-library";
Type = "PackageType";
Name = "Mach-O Static Library";
DefaultBuildSettings = {
EXECUTABLE_PREFIX = "lib";
EXECUTABLE_SUFFIX = ".a";
EXECUTABLE_NAME = "$(EXECUTABLE_PREFIX)$(PRODUCT_NAME)$(EXECUTABLE_VARIANT_SUFFIX)$(EXECUTABLE_SUFFIX)";
EXECUTABLE_PATH = "$(EXECUTABLE_NAME)";
};
ProductReference = {
FileType = "archive.ar";
Name = "$(EXECUTABLE_NAME)";
IsLaunchable = "NO";
};
}
{
Identifier = "com.apple.package-type.wrapper";
Type = "PackageType";
Name = "Wrapper";
DefaultBuildSettings = {
WRAPPER_SUFFIX = ".bundle";
WRAPPER_NAME = "$(WRAPPER_PREFIX)$(PRODUCT_NAME)$(WRAPPER_SUFFIX)";
CONTENTS_FOLDER_PATH = "$(WRAPPER_NAME)/Contents";
EXECUTABLE_NAME = "$(EXECUTABLE_PREFIX)$(PRODUCT_NAME)$(EXECUTABLE_VARIANT_SUFFIX)$(EXECUTABLE_SUFFIX)";
EXECUTABLE_FOLDER_PATH = "$(CONTENTS_FOLDER_PATH)/MacOS";
EXECUTABLE_PATH = "$(EXECUTABLE_FOLDER_PATH)/$(EXECUTABLE_NAME)";
INFOPLIST_PATH = "$(CONTENTS_FOLDER_PATH)/Info.plist";
INFOSTRINGS_PATH = "$(LOCALIZED_RESOURCES_FOLDER_PATH)/InfoPlist.strings";
PKGINFO_PATH = "$(CONTENTS_FOLDER_PATH)/PkgInfo";
PBDEVELOPMENTPLIST_PATH = "$(CONTENTS_FOLDER_PATH)/pbdevelopment.plist";
VERSIONPLIST_PATH = "$(CONTENTS_FOLDER_PATH)/version.plist";
PUBLIC_HEADERS_FOLDER_PATH = "$(CONTENTS_FOLDER_PATH)/Headers";
PRIVATE_HEADERS_FOLDER_PATH = "$(CONTENTS_FOLDER_PATH)/PrivateHeaders";
EXECUTABLES_FOLDER_PATH = "$(CONTENTS_FOLDER_PATH)/Executables";
FRAMEWORKS_FOLDER_PATH = "$(CONTENTS_FOLDER_PATH)/Frameworks";
SHARED_FRAMEWORKS_FOLDER_PATH = "$(CONTENTS_FOLDER_PATH)/SharedFrameworks";
SHARED_SUPPORT_FOLDER_PATH = "$(CONTENTS_FOLDER_PATH)/SharedSupport";
UNLOCALIZED_RESOURCES_FOLDER_PATH = "$(CONTENTS_FOLDER_PATH)/Resources";
LOCALIZED_RESOURCES_FOLDER_PATH = "$(UNLOCALIZED_RESOURCES_FOLDER_PATH)/$(DEVELOPMENT_LANGUAGE).lproj";
DOCUMENTATION_FOLDER_PATH = "$(LOCALIZED_RESOURCES_FOLDER_PATH)/Documentation";
PLUGINS_FOLDER_PATH = "$(CONTENTS_FOLDER_PATH)/PlugIns";
SCRIPTS_FOLDER_PATH = "$(UNLOCALIZED_RESOURCES_FOLDER_PATH)/Scripts";
};
ProductReference = {
FileType = "wrapper.cfbundle";
Name = "$(WRAPPER_NAME)";
IsLaunchable = "NO";
};
}
{
Identifier = "com.apple.package-type.wrapper.application";
Type = "PackageType";
BasedOn = "com.apple.package-type.wrapper";
Name = "Application Wrapper";
DefaultBuildSettings = {
GENERATE_PKGINFO_FILE = "YES";
};
ProductReference = {
FileType = "wrapper.application";
Name = "$(WRAPPER_NAME)";
IsLaunchable = "YES";
};
}
];
# Based off of the "MacOSX Product Types.xcspec" file. All
# bundles/wrapper are removed, because we prefer dynamic products in
# NixPkgs.
ProductTypes = [
{
Identifier = "com.apple.product-type.tool";
Type = "ProductType";
Name = "Command-line Tool";
PackageTypes = [ "com.apple.package-type.mach-o-executable" ];
}
{
Identifier = "com.apple.product-type.objfile";
Type = "ProductType";
Name = "Object File";
PackageTypes = [ "com.apple.package-type.mach-o-objfile" ];
}
{
Identifier = "com.apple.product-type.library.dynamic";
Type = "ProductType";
Name = "Dynamic Library";
PackageTypes = [ "com.apple.package-type.mach-o-dylib" ];
DefaultBuildProperties = {
FULL_PRODUCT_NAME = "$(EXECUTABLE_NAME)";
MACH_O_TYPE = "mh_dylib";
REZ_EXECUTABLE = "YES";
EXECUTABLE_SUFFIX = ".$(EXECUTABLE_EXTENSION)";
EXECUTABLE_EXTENSION = "dylib";
DYLIB_COMPATIBILITY_VERSION = "1";
DYLIB_CURRENT_VERSION = "1";
FRAMEWORK_FLAG_PREFIX = "-framework";
LIBRARY_FLAG_PREFIX = "-l";
LIBRARY_FLAG_NOSPACE = "YES";
STRIP_STYLE = "debugging";
GCC_INLINES_ARE_PRIVATE_EXTERN = "YES";
CODE_SIGNING_ALLOWED = "YES";
CODE_SIGNING_REQUIRED = "NO";
};
}
{
Identifier = "com.apple.product-type.library.static";
Type = "ProductType";
Name = "Static Library";
PackageTypes = [ "com.apple.package-type.static-library" ];
DefaultBuildProperties = {
FULL_PRODUCT_NAME = "$(EXECUTABLE_NAME)";
MACH_O_TYPE = "staticlib";
REZ_EXECUTABLE = "YES";
EXECUTABLE_PREFIX = "lib";
EXECUTABLE_SUFFIX = ".$(EXECUTABLE_EXTENSION)";
EXECUTABLE_EXTENSION = "a";
FRAMEWORK_FLAG_PREFIX = "-framework";
LIBRARY_FLAG_PREFIX = "-l";
LIBRARY_FLAG_NOSPACE = "YES";
STRIP_STYLE = "debugging";
SEPARATE_STRIP = "YES";
CLANG_ENABLE_MODULE_DEBUGGING = "NO";
};
}
{
Type = "ProductType";
Identifier = "com.apple.product-type.bundle";
Name = "Bundle";
DefaultBuildProperties = {
FULL_PRODUCT_NAME = "$(WRAPPER_NAME)";
MACH_O_TYPE = "mh_bundle";
WRAPPER_PREFIX = "";
WRAPPER_SUFFIX = ".$(WRAPPER_EXTENSION)";
WRAPPER_EXTENSION = "bundle";
WRAPPER_NAME = "$(WRAPPER_PREFIX)$(PRODUCT_NAME)$(WRAPPER_SUFFIX)";
FRAMEWORK_FLAG_PREFIX = "-framework";
LIBRARY_FLAG_PREFIX = "-l";
LIBRARY_FLAG_NOSPACE = "YES";
STRIP_STYLE = "non-global";
};
PackageTypes = [ "com.apple.package-type.wrapper" ];
IsWrapper = "YES";
HasInfoPlist = "YES";
HasInfoPlistStrings = "YES";
}
{
Identifier = "com.apple.product-type.application";
Type = "ProductType";
BasedOn = "com.apple.product-type.bundle";
Name = "Application";
DefaultBuildProperties = {
MACH_O_TYPE = "mh_execute";
WRAPPER_SUFFIX = ".$(WRAPPER_EXTENSION)";
WRAPPER_EXTENSION = "app";
};
PackageTypes = [ "com.apple.package-type.wrapper.application" ];
}
{
Type = "ProductType";
Identifier = "com.apple.product-type.framework";
Name = "Bundle";
DefaultBuildProperties = {
FULL_PRODUCT_NAME = "$(WRAPPER_NAME)";
MACH_O_TYPE = "mh_bundle";
WRAPPER_PREFIX = "";
WRAPPER_SUFFIX = ".$(WRAPPER_EXTENSION)";
WRAPPER_EXTENSION = "bundle";
WRAPPER_NAME = "$(WRAPPER_PREFIX)$(PRODUCT_NAME)$(WRAPPER_SUFFIX)";
FRAMEWORK_FLAG_PREFIX = "-framework";
LIBRARY_FLAG_PREFIX = "-l";
LIBRARY_FLAG_NOSPACE = "YES";
STRIP_STYLE = "non-global";
};
PackageTypes = [ "com.apple.package-type.wrapper" ];
IsWrapper = "YES";
HasInfoPlist = "YES";
HasInfoPlistStrings = "YES";
}
];
ToolchainInfo = {
Identifier = "com.apple.dt.toolchain.XcodeDefault";
};
in
{
"Info.plist" = builtins.toFile "Info.plist" (toPlist { escape = true; } Info);
"ToolchainInfo.plist" = builtins.toFile "ToolchainInfo.plist" (
toPlist { escape = true; } ToolchainInfo
);
"Architectures.xcspec" = builtins.toFile "Architectures.xcspec" (
toPlist { escape = true; } Architectures
);
"PackageTypes.xcspec" = builtins.toFile "PackageTypes.xcspec" (
toPlist { escape = true; } PackageTypes
);
"ProductTypes.xcspec" = builtins.toFile "ProductTypes.xcspec" (
toPlist { escape = true; } ProductTypes
);
}

View File

@@ -1,40 +0,0 @@
# SDK phase overlay: strip license-encumbered dylib stubs from the libSystem
# reexported-libraries list by rewriting the text-based stub (.tbd) files in place.
let
removedDylibs = [
# corecrypto is available under a very restrictive license (effectively: non-free, can't use).
# Without the headers and not being able to use corecrypto due to its license, it's not very useful.
# Stubs are included in the SDK for all dylibs, including corecrypto. They should be removed.
"/usr/lib/system/libcorecrypto.dylib"
];
in
{ lib
, jq
, llvm
,
}:
# Applied as an extension over the SDK derivation's attributes.
self: super: {
# `jq` edits the JSON-based tbd-v5 stubs; `llvm` provides `llvm-readtapi`.
nativeBuildInputs = super.nativeBuildInputs or [ ] ++ [
jq
llvm
];
# NOTE: `or` binds tighter than `+`, so this appends to any existing buildPhase.
buildPhase = super.buildPhase or "" + ''
echo "Removing the following dylibs from the libSystem reexported libraries list: ${lib.escapeShellArg (lib.concatStringsSep ", " removedDylibs)}"
for libSystem in libSystem.B.tbd libSystem.B_asan.tbd; do
# tbd-v5 is a JSON-based format, which can be manipulated by `jq`.
llvm-readtapi --filetype=tbd-v5 usr/lib/$libSystem \
| jq --argjson libs ${lib.escapeShellArg (builtins.toJSON removedDylibs)} '
if .libraries then
.libraries[] |= select(.install_names[] | any([.] | inside($libs)) | not)
else
.
end
| .main_library.reexported_libraries[].names[] |= select([.] | inside($libs) | not)
' > usr/lib/$libSystem~
# Convert libSystem back to tbd-v4 because not all tooling supports the JSON-based format yet.
llvm-readtapi --filetype=tbd-v4 usr/lib/$libSystem~ -o usr/lib/$libSystem
rm usr/lib/$libSystem~
done
'';
}

View File

@@ -1,74 +0,0 @@
# SDK phase overlay: propagate packages that other platforms ship in their libc,
# and symlink headers (iconv, ncurses, CUPS) that Swift's Darwin module expects
# to find inside the SDK itself.
# NOTE(review): `db` and `xcbuild` appear unused in this expression — possibly
# kept for callPackage uniformity; confirm before removing.
{ lib
, cups
, darwin
, db
, libiconv
, ncurses
, stdenv
, stdenvNoCC
, xcbuild
,
}:
let
# CUPS has too many dependencies to build as part of the Darwin bootstrap. Its also typically taken as an explicit
# dependency by other packages, so building only the headers (to satisfy other SDK headers) should be okay.
cupsHeaders = darwin.bootstrapStdenv.mkDerivation {
pname = "${lib.getName cups}-headers";
version = lib.getVersion cups;
inherit (cups) src;
patches = cups.patches or [ ];
strictDeps = true;
# Headers only: skip the build and run just the `install-headers` target below.
dontBuild = true;
buildInputs = [ darwin.libresolv ]; # The `configure` script requires libresolv headers.
# CUPS's configure script fails to find `ar` when cross-compiling.
configureFlags = [ "ac_cv_path_AR=${stdenv.cc.targetPrefix}ar" ];
installTargets = [ "install-headers" ];
__structuredAttrs = true;
meta = {
inherit (cups.meta)
homepage
description
license
maintainers
platforms
;
};
};
in
self: super: {
# These packages are propagated only because other platforms include them in their libc (or otherwise by default).
# Reducing the number of special cases required to support Darwin makes supporting it easier for package authors.
propagatedBuildInputs =
super.propagatedBuildInputs or [ ]
++ [
libiconv
darwin.libresolv
darwin.libsbuf
# Shipped with the SDK only as a library with no headers
(lib.getLib darwin.libutil)
]
# x86_64-darwin links the object files from Csu when targeting very old releases
++ lib.optionals stdenvNoCC.hostPlatform.isx86_64 [ darwin.Csu ];
# The Darwin module for Swift requires certain headers to be included in the SDK (and not just be propagated).
buildPhase = super.buildPhase or "" + ''
for header in '${lib.getDev libiconv}/include/'* '${lib.getDev ncurses}/include/'* '${cupsHeaders}/include/'*; do
ln -s "$header" "usr/include/$(basename "$header")"
done
'';
# Exported to allow the headers to pass the requisites check in the stdenv bootstrap.
passthru = (super.passthru or { }) // {
cups-headers = cupsHeaders;
};
}

View File

@@ -1,53 +0,0 @@
# SDK phase overlay: wire the SDK into xcbuild — install the generated Xcode
# plists/xcspecs, propagate `xcrun`, and expose a minimal XcodeDefault toolchain
# with libtool and a few binutils that packages expect `xcrun -find` to locate.
{ lib
, pkgsBuildHost
, stdenv
, stdenvNoCC
, sdkVersion
,
}:
let
# Generated Info/ToolchainInfo plists and xcspec files for the target platform.
plists = import ./plists.nix {
inherit lib stdenvNoCC sdkVersion;
xcodePlatform = if stdenvNoCC.hostPlatform.isMacOS then "MacOSX" else "iPhoneOS";
};
# Taken from pkgsBuildHost: these tools run on the build machine.
inherit (pkgsBuildHost) darwin cctools xcbuild;
in
self: super: {
propagatedNativeBuildInputs = super.propagatedNativeBuildInputs or [ ] ++ [ xcbuild.xcrun ];
# NOTE: `$platformPath` is expected to be set by another phase/hook of this
# derivation — it is not defined here.
postInstall = super.postInstall or "" + ''
specspath=$out/Library/Xcode/Specifications
toolchainsPath=$out/Toolchains/XcodeDefault.xctoolchain
mkdir -p "$specspath" "$toolchainsPath"
# xcbuild expects to find things relative to the plist locations. If these are linked instead of copied,
# it wont find any platforms or SDKs.
cp '${plists."Info.plist"}' "$platformPath/Info.plist"
cp '${plists."ToolchainInfo.plist"}' "$toolchainsPath/ToolchainInfo.plist"
for spec in '${xcbuild}/Library/Xcode/Specifications/'*; do
ln -s "$spec" "$specspath/$(basename "$spec")"
done
cp '${plists."Architectures.xcspec"}' "$specspath/Architectures.xcspec"
cp '${plists."PackageTypes.xcspec"}' "$specspath/PackageTypes.xcspec"
cp '${plists."ProductTypes.xcspec"}' "$specspath/ProductTypes.xcspec"
mkdir -p "$out/usr/bin"
ln -s '${xcbuild.xcrun}/bin/xcrun' "$out/usr/bin/xcrun"
# Include `libtool` in the toolchain, so `xcrun -find libtool` can find it without requiring `cctools.libtool`
# as a `nativeBuildInput`.
mkdir -p "$toolchainsPath/usr/bin"
if [ -e '${cctools.libtool}/bin/${stdenv.cc.targetPrefix}libtool' ]; then
ln -s '${cctools.libtool}/bin/${stdenv.cc.targetPrefix}libtool' "$toolchainsPath/usr/bin/libtool"
fi
# Include additional binutils required by some packages (such as Chromium).
for tool in lipo nm otool size strip; do
if [ -e '${darwin.binutils-unwrapped}/bin/${stdenv.cc.targetPrefix}'$tool ]; then
ln -s '${darwin.binutils-unwrapped}/bin/${stdenv.cc.targetPrefix}'$tool "$toolchainsPath/usr/bin/$tool"
fi
done
'';
}

View File

@@ -1,24 +0,0 @@
# SDK phase overlay: delete headers, frameworks, and library stubs for packages
# that nixpkgs provides itself, as listed in metadata/disallowed-packages.json.
let
# Each entry may carry optional `headers`, `frameworks`, and `libraries` lists.
disallowedPackages = builtins.fromJSON (builtins.readFile ../metadata/disallowed-packages.json);
in
# NOTE(review): `jq` and `stdenv` appear unused in this expression — possibly
# kept for callPackage uniformity; confirm before removing.
{ lib
, jq
, stdenv
,
}:
self: super: {
# Remove headers and stubs for packages that are available in nixpkgs.
# The interpolation below expands at eval time into one echo plus a series of
# `rm -rf` commands per disallowed package (missing list attrs default to []).
buildPhase = super.buildPhase or "" + ''
${lib.concatMapStringsSep "\n" (
pkg:
lib.concatLines (
[ ''echo "Removing headers and libraries from ${pkg.package}"'' ]
++ (map (header: "rm -rf -- usr/include/${header}") pkg.headers or [ ])
++ (map (framework: "rm -rf -- System/Library/Frameworks/${framework}") pkg.frameworks or [ ])
++ (map (library: "rm -rf -- usr/lib/${library}") pkg.libraries or [ ])
)
) disallowedPackages}
'';
}

View File

@@ -1,9 +0,0 @@
# SDK phase overlay: wrap the accumulated buildPhase in the standard stdenv
# pre/post hooks. Unlike the other phases this REPLACES buildPhase instead of
# appending to it, so it must be composed last in the extension list.
{}:
self: super: {
buildPhase = ''
runHook preBuild
${super.buildPhase or ""}
runHook postBuild
'';
}

View File

@@ -1,536 +0,0 @@
{
"14.4": {
"CarbonHeaders": {
"hash": "sha256-nIPXnLr21yVnpBhx9K5q3l/nPARA6JL/dED08MeyhP8=",
"version": "18.1"
},
"CommonCrypto": {
"hash": "sha256-/VoOR9wJuKnmGE1CWGGXxX8SpmALHnEooNTa3QM+ITc=",
"version": "600028.100.1"
},
"IOAudioFamily": {
"hash": "sha256-VSk3jvsITJugtL67Qt0m4qJ879i7Fj6B/NGBFVCwpiU=",
"version": "540.3"
},
"IOBDStorageFamily": {
"hash": "sha256-UgLMsQBe1QLzlbScmPmASBN7VH4YBmNOUX2CEDezjmE=",
"version": "22"
},
"IOCDStorageFamily": {
"hash": "sha256-p/2qM5zjXFDRb/DISpEHxQEdvmuLlRGt/Ygc71Yu2rI=",
"version": "61"
},
"IODVDStorageFamily": {
"hash": "sha256-1Sa8aZBGNtqJBNHva+YXxET6Wcdm2PgVrTzYT/8qrN4=",
"version": "45"
},
"IOFWDVComponents": {
"hash": "sha256-WkfkWnzRupEh20U7vjsTta89clhus6GTkOpXQWXw/bM=",
"version": "208"
},
"IOFireWireAVC": {
"hash": "sha256-IUytBKhhCgg0vtI+7q8d5kxpOUgO3tQD7TMy++jrorc=",
"version": "431"
},
"IOFireWireFamily": {
"hash": "sha256-W0KOF4hkA7kFOnL1ThAeFU/YlhFVqoqk9uzGjcBppX8=",
"version": "487"
},
"IOFireWireSBP2": {
"hash": "sha256-bItnRQIaGUxMyiU0q+4N8e5+jYiDEOUPmsrKhBFXvok=",
"version": "445"
},
"IOFireWireSerialBusProtocolTransport": {
"hash": "sha256-P7egeaD9SSa+YyrIRzM44gILKbIL7vezXK3M6q3MBOI=",
"version": "260"
},
"IOGraphics": {
"hash": "sha256-Ag37fd3tZJLXLVq1yzHOCWGOYYfwwTkC8hnvNaTEaWg=",
"version": "598"
},
"IOHIDFamily": {
"hash": "sha256-fmYTJsquAOBwzsgRmqPyjSJJi1hGcfnMmqLIcTe8W1s=",
"version": "2031.100.16"
},
"IOKitUser": {
"hash": "sha256-1bqRiLvyr2GQfbWwhXHXXIOtIka9YDw5GbKV6bd2k4k=",
"version": "100076.101.1"
},
"IONetworkingFamily": {
"hash": "sha256-J3cLeWKrQ8ypIaqgwRH9eU5JbjEDBVoezj3a2Lvwu5k=",
"version": "177"
},
"IOSerialFamily": {
"hash": "sha256-wVS4QTx6MBOS0VrwyCZ3s5Usezwaf8rWzmNnfdDTXTU=",
"version": "93"
},
"IOStorageFamily": {
"hash": "sha256-cllpJX11c3CX8zEYdOT2TC63sx7NUAHh33yRHhrG2Ro=",
"version": "315"
},
"IOUSBFamily": {
"hash": "sha256-Z0E3TfKP49toYo1Fo9kElRap8CZ+mVDHy5RIexgJTpA=",
"version": "630.4.5"
},
"Libc": {
"hash": "sha256-fxBM4KbPwQNVEJl7PCKP+1nUk9Oce/O2+0lVBxyngew=",
"version": "1592.100.35"
},
"Libinfo": {
"hash": "sha256-zZr6Mmou8Q+G6/wS+k0k7R+XirB94TNCUGS5dhi96ZE=",
"version": "583.0.1"
},
"Libm": {
"hash": "sha256-p4BndAag9d0XSMYWQ+c4myGv5qXbKx5E1VghudSbpTk=",
"version": "2026"
},
"Libnotify": {
"hash": "sha256-7X+6S3C7ZOTXJUeDXOOg5EmoZyLZvtE06x3Is0TGgSU=",
"version": "317.100.2"
},
"Librpcsvc": {
"hash": "sha256-UWYdCQ9QsBqwM01bWr+igINAHSdSluB/FrOclC5AjTI=",
"version": "31"
},
"Libsystem": {
"hash": "sha256-HsItciWrwyXujQ2hwqzv0JKOkkuynXYIqejLAEPJbMc=",
"version": "1345.100.2"
},
"OpenDirectory": {
"hash": "sha256-6fSl8PasCZSBfe0ftaePcBuSEO3syb6kK+mfDI6iR7A=",
"version": "146"
},
"Security": {
"hash": "sha256-NgTGbaw5JkpboDQpt1fSgUr9NYGS+bIOrEMQX7mLAME=",
"version": "61123.100.169"
},
"architecture": {
"hash": "sha256-PRNUrhzSOrwmxSPkKmV0LV7yEIik65sdkfKdBqcwFhU=",
"version": "282"
},
"configd": {
"hash": "sha256-+3xesYxqfsNjWCW3T87OA7+Z1hBqmGEh/I8kP8Ajbso=",
"version": "1300.100.9"
},
"copyfile": {
"hash": "sha256-rSCTgzdHr7QmnPk9rJ9P4fOAolnEQv8PHfgAY+qA0s4=",
"version": "196.100.4"
},
"dtrace": {
"hash": "sha256-04Q35rCKnM5Csv5poFJKpK0VplWq4hvy251/Cb2Kl80=",
"version": "401.100.3"
},
"dyld": {
"hash": "sha256-6P/Da6xP19vmaCROoYv9pl7DaW3/U+qZBJT8PD33bn0=",
"version": "1160.6"
},
"eap8021x": {
"hash": "sha256-Ky6KSlJhyX1NRufGhVBcp+ZFmqYrAxwC/5QvJhC2PhU=",
"version": "354.100.3"
},
"hfs": {
"hash": "sha256-+YUVOttZU7C8I14CC6t3ZH2KxAjjTA2nB0y5bPgLxZM=",
"version": "650.0.2"
},
"launchd": {
"hash": "sha256-8mW9bnuHmRXCx9py8Wy28C5b2QPICW0rlAps5njYa00=",
"version": "842.1.4"
},
"libclosure": {
"hash": "sha256-M/jnIHzKYvdFCO0tJ1JXiD/UcZtJhLIoulaCQQUbn30=",
"version": "90"
},
"libdispatch": {
"hash": "sha256-igqIA5DMVHjG30WMHZZpYY7LRM9hZyMWItD+UxeTehY=",
"version": "1477.100.9"
},
"libmalloc": {
"hash": "sha256-Sh4/z7lGWRMldOPURkP5vLOAb5Ou6AUsVJEWz9wk9hI=",
"version": "521.100.59"
},
"libplatform": {
"hash": "sha256-gojt3sWOr7XO2yYI/B1CmNLTPFieSfoNtlOgQahOCok=",
"version": "316.100.10"
},
"libpthread": {
"hash": "sha256-phjfN8+IU8ibPsflR6LktnSi3giy89ghI+cFyrhiQNo=",
"version": "519.101.1"
},
"mDNSResponder": {
"hash": "sha256-0ECbWeMnIRTsi03BeBEe5boyR/84JJPbxzPQze8hHSA=",
"version": "2200.100.94.0.2"
},
"objc4": {
"hash": "sha256-eUVSpbyTEOMEdHoxSv6lZIZwB+cW/YWIaTZTcHgGOjo=",
"version": "912.3"
},
"ppp": {
"hash": "sha256-8+QUA79sHf85yvGSPE9qCmGsrZDT3NZnbgZVroJw/Hg=",
"version": "1016"
},
"removefile": {
"hash": "sha256-L6I0u8S3h3uV1veKA5HvkSebbBCd78ymlf//KWbebZo=",
"version": "70.100.4"
},
"xnu": {
"hash": "sha256-j5Ep1RX5DTJqTGszrF4d/JtzUqZ6nA6XoExqcIQ0RVQ=",
"version": "10063.101.15"
}
},
"15.5": {
"CarbonHeaders": {
"hash": "sha256-nIPXnLr21yVnpBhx9K5q3l/nPARA6JL/dED08MeyhP8=",
"version": "18.1"
},
"CommonCrypto": {
"hash": "sha256-+qAwL6+s7di9cX/qXtapLkjCFoDuZaSYltRJEG4qekM=",
"version": "600035"
},
"IOAudioFamily": {
"hash": "sha256-VSk3jvsITJugtL67Qt0m4qJ879i7Fj6B/NGBFVCwpiU=",
"version": "600.2"
},
"IOBDStorageFamily": {
"hash": "sha256-s8hTwX0jq2iPULfBLUwpzqtszWuvJrrLGbmrKa/fY4U=",
"version": "24"
},
"IOCDStorageFamily": {
"hash": "sha256-p/2qM5zjXFDRb/DISpEHxQEdvmuLlRGt/Ygc71Yu2rI=",
"version": "62"
},
"IODVDStorageFamily": {
"hash": "sha256-1Sa8aZBGNtqJBNHva+YXxET6Wcdm2PgVrTzYT/8qrN4=",
"version": "46"
},
"IOFWDVComponents": {
"hash": "sha256-WkfkWnzRupEh20U7vjsTta89clhus6GTkOpXQWXw/bM=",
"version": "208"
},
"IOFireWireAVC": {
"hash": "sha256-qR9lSTa7PN5Z9Nis4tfuXlcZGMIU48dete/NPD0UBbE=",
"version": "434"
},
"IOFireWireFamily": {
"hash": "sha256-hmErAXjLWIelqJaCrB8J4IiIxyB7S6EHFY+AY9YhmKQ=",
"version": "490"
},
"IOFireWireSBP2": {
"hash": "sha256-Xk+PDnUaO9q46nQwHwTKf/QXtGclfs0wTWiUbcV7e4s=",
"version": "452"
},
"IOFireWireSerialBusProtocolTransport": {
"hash": "sha256-P7egeaD9SSa+YyrIRzM44gILKbIL7vezXK3M6q3MBOI=",
"version": "261"
},
"IOGraphics": {
"hash": "sha256-iysZE42mOKZbFxSZBNspaBTCRKEKK38DFGBxZWQxZxI=",
"version": "599"
},
"IOHIDFamily": {
"hash": "sha256-gEYPyjXgQ2ABGufCKPjmzMdNRLxhELkCvOURCokyTO4=",
"version": "2115.100.21"
},
"IOKitUser": {
"hash": "sha256-p32U+jHfwA/tqnjF4p1BmojghEXK8KxiflW3IHs2iIY=",
"version": "100150.120.2"
},
"IONetworkingFamily": {
"hash": "sha256-gZ7Dkk4Iu7AV9K2ioqSeJ1W7bTNxv77bmT18iv3ljLg=",
"version": "185"
},
"IOSerialFamily": {
"hash": "sha256-wVS4QTx6MBOS0VrwyCZ3s5Usezwaf8rWzmNnfdDTXTU=",
"version": "93"
},
"IOStorageFamily": {
"hash": "sha256-/0H0tqWUWkgYigYypucbc7lOCFYDuukwF9fvLEOhwOk=",
"version": "323"
},
"IOUSBFamily": {
"hash": "sha256-Z0E3TfKP49toYo1Fo9kElRap8CZ+mVDHy5RIexgJTpA=",
"version": "630.4.5"
},
"Libc": {
"hash": "sha256-nWDokN0Vr5pUyNGculnDOah9RNgHiWr3S13RSQLmZrc=",
"version": "1698.100.8"
},
"Libinfo": {
"hash": "sha256-UI5mGvzZ6BPafGYD6CrNAJAKjeJLB6urAS2lpB6X/Ec=",
"version": "597"
},
"Libm": {
"hash": "sha256-p4BndAag9d0XSMYWQ+c4myGv5qXbKx5E1VghudSbpTk=",
"version": "2026"
},
"Libnotify": {
"hash": "sha256-GDYMVi1034f9empq0YOuumQp/BDJ7phTb0Zl4KTY9xg=",
"version": "342"
},
"Librpcsvc": {
"hash": "sha256-UWYdCQ9QsBqwM01bWr+igINAHSdSluB/FrOclC5AjTI=",
"version": "31"
},
"Libsystem": {
"hash": "sha256-nawWJiu2IJ34ek5iOX6CrlqMzev7TuJpUkvDp30ZQ/U=",
"version": "1351"
},
"OpenDirectory": {
"hash": "sha256-6fSl8PasCZSBfe0ftaePcBuSEO3syb6kK+mfDI6iR7A=",
"version": "146"
},
"Security": {
"hash": "sha256-ZOrOOCk+hZbzDilzkihpQfsDpzV3Ul4zy6fpFRWUQHw=",
"version": "61439.120.27"
},
"architecture": {
"hash": "sha256-PRNUrhzSOrwmxSPkKmV0LV7yEIik65sdkfKdBqcwFhU=",
"version": "282"
},
"configd": {
"hash": "sha256-ZdUq1SrOwB88Lx68ekrA4zeVsLDZz4TAJywNnF+uAzY=",
"version": "1351.120.3"
},
"copyfile": {
"hash": "sha256-rLqT6e44W2ohgwUXREmiOyJBYCrV3gRLbtVnbUq60xc=",
"version": "221.121.1"
},
"dtrace": {
"hash": "sha256-iNEZyxK3DmEwO3gzrfvCaVZSEuuOMQm5IG/6FodPNdI=",
"version": "411"
},
"dyld": {
"hash": "sha256-4OOghgUYyMJbsTe96fiWCndTJ1BS94rK9v6Kqn/ooYs=",
"version": "1285.19"
},
"eap8021x": {
"hash": "sha256-Kx/wwnt108hDm0qQPyTNbZ8KoHkD5m7L4yb5qjSuQjI=",
"version": "365.120.2"
},
"hfs": {
"hash": "sha256-5/3Ycp3cKqlgAl1kjBmbF5tFlfJYQS5rbrbk4SS66b8=",
"version": "683.120.3"
},
"launchd": {
"hash": "sha256-8mW9bnuHmRXCx9py8Wy28C5b2QPICW0rlAps5njYa00=",
"version": "842.1.4"
},
"libclosure": {
"hash": "sha256-pvwfcbeEJmTEPdt6/lgVswiabLRG+sMN6VT5FwG7C4Q=",
"version": "96"
},
"libdispatch": {
"hash": "sha256-jTp2DolOOCQPBt1HRotkmPnKgQ2LGgniEqeHoM+vlKg=",
"version": "1521.120.4"
},
"libmalloc": {
"hash": "sha256-d9AVHSYTqHDlgctv8Hh4HAYW53MJelj4F8LWPsjrsws=",
"version": "715.120.13"
},
"libplatform": {
"hash": "sha256-gpijoTMvdkM0PdG8gyIllOJlh/MtTc4ro9ODDAhN6gM=",
"version": "349"
},
"libpthread": {
"hash": "sha256-N+MMXdbthsxauTTfZ5ElUs39dVH+Chn1yyU6pObZpkU=",
"version": "536"
},
"mDNSResponder": {
"hash": "sha256-ILx12PRxj/+VqfpCCErJFEJXFI9yzTh4g+FK0UCenIE=",
"version": "2600.120.12"
},
"objc4": {
"hash": "sha256-DMxa25gXjKCkiDnVJ/8SyJUjaBlmBGABg8EfCHcmTj0=",
"version": "940.4"
},
"ppp": {
"hash": "sha256-8+QUA79sHf85yvGSPE9qCmGsrZDT3NZnbgZVroJw/Hg=",
"version": "1016"
},
"removefile": {
"hash": "sha256-Z5UD0mk/s80CQB0PZWDzSl2JWXmnVmwUvlNb28+hR3k=",
"version": "81"
},
"xnu": {
"hash": "sha256-o4tCuCAIgAYg/Li3wTs12mVWr5C/4vbwu1zi+kJ9d6w=",
"version": "11417.121.6"
}
},
"26.0": {
"CarbonHeaders": {
"hash": "sha256-nIPXnLr21yVnpBhx9K5q3l/nPARA6JL/dED08MeyhP8=",
"version": "18.1"
},
"CommonCrypto": {
"hash": "sha256-+qAwL6+s7di9cX/qXtapLkjCFoDuZaSYltRJEG4qekM=",
"version": "600035"
},
"IOAudioFamily": {
"hash": "sha256-A3iiAjjP29VdjMj40tLS5Q/ni4qeh9bBpnmNzeG2pIY=",
"version": "700.2"
},
"IOBDStorageFamily": {
"hash": "sha256-OcQUJ3nEfrpvWX/npnedJ4PECIGWFSLiM0PKoiH911w=",
"version": "26"
},
"IOCDStorageFamily": {
"hash": "sha256-p/2qM5zjXFDRb/DISpEHxQEdvmuLlRGt/Ygc71Yu2rI=",
"version": "62"
},
"IODVDStorageFamily": {
"hash": "sha256-1Sa8aZBGNtqJBNHva+YXxET6Wcdm2PgVrTzYT/8qrN4=",
"version": "46"
},
"IOFWDVComponents": {
"hash": "sha256-WkfkWnzRupEh20U7vjsTta89clhus6GTkOpXQWXw/bM=",
"version": "208"
},
"IOFireWireAVC": {
"hash": "sha256-qR9lSTa7PN5Z9Nis4tfuXlcZGMIU48dete/NPD0UBbE=",
"version": "436"
},
"IOFireWireFamily": {
"hash": "sha256-hmErAXjLWIelqJaCrB8J4IiIxyB7S6EHFY+AY9YhmKQ=",
"version": "492"
},
"IOFireWireSBP2": {
"hash": "sha256-Xk+PDnUaO9q46nQwHwTKf/QXtGclfs0wTWiUbcV7e4s=",
"version": "454"
},
"IOFireWireSerialBusProtocolTransport": {
"hash": "sha256-cM/VFhVWNVwdJYk+mme0UYttQd7eJwd7Hlo7KNRyHY0=",
"version": "262"
},
"IOGraphics": {
"hash": "sha256-iysZE42mOKZbFxSZBNspaBTCRKEKK38DFGBxZWQxZxI=",
"version": "599"
},
"IOHIDFamily": {
"hash": "sha256-YLnabX90g4Q8LxjwVuJF6KODCDxychWV+VJaNG9d8fI=",
"version": "2222.0.24"
},
"IOKitUser": {
"hash": "sha256-ngwi8YMUqE0q8j7Lr5cqJwi2V+IDu3ie3bduotHIUJU=",
"version": "100222.0.4"
},
"IONetworkingFamily": {
"hash": "sha256-ZF5ML41Y1l1liQn32qTkcl4mMvx9Xdizb9VgvTzVTL4=",
"version": "186"
},
"IOSerialFamily": {
"hash": "sha256-wVS4QTx6MBOS0VrwyCZ3s5Usezwaf8rWzmNnfdDTXTU=",
"version": "93"
},
"IOStorageFamily": {
"hash": "sha256-1FKSF622qeXPGngA3UmQ2M/IU1pdlMoYBPbXytUFDaQ=",
"version": "331"
},
"IOUSBFamily": {
"hash": "sha256-Z0E3TfKP49toYo1Fo9kElRap8CZ+mVDHy5RIexgJTpA=",
"version": "630.4.5"
},
"Libc": {
"hash": "sha256-k+HQ+qgye0ORFm0hU8WzE4ysbbEoFZ7wcbVl5giDH/E=",
"version": "1725.0.11"
},
"Libinfo": {
"hash": "sha256-4InBEPi0n2EMo/8mIBib1Im4iTKRcRJ4IlAcLCigVGk=",
"version": "600"
},
"Libm": {
"hash": "sha256-p4BndAag9d0XSMYWQ+c4myGv5qXbKx5E1VghudSbpTk=",
"version": "2026"
},
"Libnotify": {
"hash": "sha256-p8cJZlBYOFmI1NDHXGYjgcv8z9Ldc1amZuYlxxJfeVY=",
"version": "344.0.1"
},
"Librpcsvc": {
"hash": "sha256-UWYdCQ9QsBqwM01bWr+igINAHSdSluB/FrOclC5AjTI=",
"version": "31"
},
"Libsystem": {
"hash": "sha256-/NlSwPaoTVx+bl9hYsfz3C5MuLdqGv4vdAh0KDbDKmY=",
"version": "1356"
},
"OpenDirectory": {
"hash": "sha256-6fSl8PasCZSBfe0ftaePcBuSEO3syb6kK+mfDI6iR7A=",
"version": "146"
},
"Security": {
"hash": "sha256-oxOvZsDoNYZNiWf+MASHrR4Q2o5oaqvK2We51hH7CO8=",
"version": "61901.0.87.0.1"
},
"architecture": {
"hash": "sha256-PRNUrhzSOrwmxSPkKmV0LV7yEIik65sdkfKdBqcwFhU=",
"version": "282"
},
"configd": {
"hash": "sha256-58or+OQP788UgQKO7Y8k8pY/enaSqH971ks7xCPu8fA=",
"version": "1385.0.7"
},
"copyfile": {
"hash": "sha256-I9uDi5BDQKa7mO3XpHxv0d6PiROW2ueZ3vGfrsG0OJo=",
"version": "230.0.1.0.1"
},
"dtrace": {
"hash": "sha256-5HpH6Cg8vWWzOX5ADD//izKDvqGnzV05Giju8lmGeyA=",
"version": "413"
},
"dyld": {
"hash": "sha256-jzoFLwbms0rUwzyjYif/r6Rmr4kyn+as/bhc4paEPeY=",
"version": "1323.3"
},
"eap8021x": {
"hash": "sha256-17bseWT4OWMA8hF+YSDDjxhVyJpbpP2xwv8dGti1YoM=",
"version": "368.0.3"
},
"hfs": {
"hash": "sha256-OkgqZ03gwn2hTuHxZrPDmQOrY4Dwu7MrX+BfG+PTgvE=",
"version": "704.0.3.0.2"
},
"launchd": {
"hash": "sha256-8mW9bnuHmRXCx9py8Wy28C5b2QPICW0rlAps5njYa00=",
"version": "842.1.4"
},
"libclosure": {
"hash": "sha256-pvwfcbeEJmTEPdt6/lgVswiabLRG+sMN6VT5FwG7C4Q=",
"version": "96"
},
"libdispatch": {
"hash": "sha256-L0+Ho9dAlMXVpqFEGIcIMsJc0gULckRulUImNEZe5MU=",
"version": "1542.0.4"
},
"libmalloc": {
"hash": "sha256-482hgm1ESr3LWC/JhuQNGNu9smsa2Eap49/eH+YNAio=",
"version": "792.1.1"
},
"libplatform": {
"hash": "sha256-wGZ2Im81mRXx6epgj/tbOJpg89CEbAr0Z8oFEpkyNMU=",
"version": "359.1.2"
},
"libpthread": {
"hash": "sha256-VuMpQjxuMsdHsFq0q6QIWSWi88gVF2jNzIfti20Gkbw=",
"version": "539"
},
"mDNSResponder": {
"hash": "sha256-iRqCpPAQDRjgRbRz3s6q2oyzq6xo+w4FTBai79104Zo=",
"version": "2881.0.25"
},
"objc4": {
"hash": "sha256-Nlgr36yLvGkUJIEFQ5w8FAB0r2syEsRTw0KuUShNT8E=",
"version": "950"
},
"ppp": {
"hash": "sha256-FzHZ05o7JxwgTqz0e3D68b/DiLu2x2ErzGMh0U78fLo=",
"version": "1020.1.1"
},
"removefile": {
"hash": "sha256-Z5UD0mk/s80CQB0PZWDzSl2JWXmnVmwUvlNb28+hR3k=",
"version": "84"
},
"xnu": {
"hash": "sha256-Cuf7kPtsn4CPXqyZmxVsJlA5i+Ikryp8ezJyGrvT63c=",
"version": "12377.1.9"
}
}
}

View File

@@ -1,533 +0,0 @@
[
{
"package": "apache",
"headers": [
"apache2"
]
},
{
"package": "apr",
"headers": [
"apr-1"
],
"libraries": [
"libapr-1.*",
"libaprutil-1.*"
]
},
{
"package": "boringssl",
"libraries": [
"libboringssl.*"
]
},
{
"package": "bzip2",
"headers": [
"bzlib.h"
],
"libraries": [
"libbz2.*"
]
},
{
"package": "corecrypto",
"libraries": [
"system/libcorecrypto*"
]
},
{
"package": "Csu",
"libraries": [
"*.o"
]
},
{
"package": "cups",
"headers": [
"cups"
],
"libraries": [
"libcups*"
]
},
{
"package": "curl",
"headers": [
"curl"
],
"libraries": [
"libcurl.*"
]
},
{
"package": "cyrus_sasl",
"headers": [
"sasl"
],
"libraries": [
"libsasl*"
]
},
{
"package": "editline",
"headers": [
"editline.h",
"editline"
],
"libraries": [
"libedit.*",
"libeditline.*"
]
},
{
"package": "html-tidy",
"headers": [
"tidy*"
],
"libraries": [
"libtidy.*"
]
},
{
"package": "hunspell",
"headers": [
"hunspell"
],
"libraries": [
"libhunspell*"
]
},
{
"package": "icu",
"headers": [
"unicode"
],
"libraries": [
"libicucore.*"
]
},
{
"package": "libarchive",
"headers": [
"archive.h",
"archive_entry.h"
],
"libraries": [
"libarchive.*"
]
},
{
"package": "libc++",
"headers": [
"c++",
"cxxabi.h",
"__cxxabi_config.h"
],
"libraries": [
"libc++*"
]
},
{
"package": "ld64",
"libraries": [
"libcodedirectory.*",
"libcodedirectory_static.*"
]
},
{
"package": "expat",
"headers": [
"expat.h",
"expat_config.h",
"expat_external.h"
],
"libraries": [
"libexpat.*"
]
},
{
"package": "libffi",
"headers": [
"ffi*"
],
"libraries": [
"libffi*"
]
},
{
"package": "libgcc",
"libraries": [
"libgcc*"
]
},
{
"package": "libiconv",
"headers": [
"iconv.h",
"libcharset.h",
"localcharset.h"
],
"libraries": [
"libcharset.*",
"libiconv.*",
"i18n"
]
},
{
"package": "libiodbc",
"libraries": [
"libiodbc*"
]
},
{
"package": "libkrb4",
"libraries": [
"libkrb4.*"
]
},
{
"package": "libkrb5",
"headers": [
"com_err.h",
"gssapi",
"gssapi.h",
"gssrpc",
"kadm5",
"kdb.h",
"krad.h",
"krb5",
"krb5.h",
"profile.h",
"verto-module.h",
"verto.h"
],
"libraries": [
"krb5",
"libcom_err.*",
"libgssapi_krb5.*",
"libgssrpc.*",
"libk5crypto.*",
"libkadm5clnt.*",
"libkadm5clnt_mit.*",
"libkadm5srv.*",
"libkadm5srv_mit.*",
"libkdb5.*",
"libkrad.*",
"libkrb5*",
"libkrb5support.*",
"libverto.*"
]
},
{
"package": "libpcap",
"headers": [
"pcap*"
],
"libraries": [
"libpcap.*"
]
},
{
"package": "libresolv",
"headers": [
"arpa/nameser.h",
"arpa/nameser_compat.h",
"dns.h",
"dns_util.h",
"nameser.h",
"resolv.h"
],
"libraries": [
"libresolv.*"
]
},
{
"package": "libstdc++",
"libraries": [
"libstdc++.*"
]
},
{
"package": "libsbuf",
"headers": [
"usbuf.h"
],
"libraries": [
"libsbuf.*"
]
},
{
"package": "libtermcap",
"headers": [
"termcap.h"
],
"libraries": [
"libtermcap.*"
]
},
{
"package": "libutil",
"headers": [
"libutil.h"
],
"libraries": [
"libutil.*",
"libutil1.*"
]
},
{
"package": "libxml2",
"headers": [
"libxml",
"libxml2"
],
"libraries": [
"libxml2.*"
]
},
{
"package": "libxo",
"headers": [
"libxo"
],
"libraries": [
"libxo.*"
]
},
{
"package": "libxslt",
"headers": [
"libexslt",
"libxslt"
],
"libraries": [
"libexslt.*",
"libxslt.*"
]
},
{
"package": "liby",
"libraries": [
"liby.a"
]
},
{
"package": "marisa-trie",
"libraries": [
"libmarisa.*"
]
},
{
"package": "ncurses",
"headers": [
"curses*",
"cursslk.h",
"eti.h",
"etip.h",
"form.h",
"menu.h",
"nc_tparm.h",
"ncurses*",
"panel.h",
"term.h",
"term_entry.h",
"termcap.h",
"tic.h",
"unctrl.h"
],
"libraries": [
"libcurses.*",
"libform.*",
"libformw.*",
"libmenu.*",
"libmenuw.*",
"libncurses.*",
"libncursesw.*",
"libpanel.*",
"libpanelw.*",
"libtinfo.*"
]
},
{
"package": "net-snmp",
"headers": [
"net-snmp"
],
"libraries": [
"libnetsnmp*"
]
},
{
"package": "nghttp",
"libraries": [
"lib*nghttp2.*"
]
},
{
"package": "openblas",
"headers": [
"cblas.h",
"f77blas.h",
"lapack.h",
"lapacke.h",
"lapacke_config.h",
"lapacke_mangling.h",
"lapacke_utils.h",
"openblas_config.h"
],
"libraries": [
"libblas.*",
"libcblas.*",
"libclapack.*",
"libf77lapack.*",
"liblapack.*",
"liblapacke.*",
"libopenblas.*",
"libopenblas.*",
"libopenblasp*"
]
},
{
"package": "openldap",
"libraries": [
"liblber.*",
"liblber_r.*",
"libldap.*",
"libldap_r.*"
]
},
{
"package": "openpam",
"headers": [
"security"
],
"libraries": [
"libpam.*",
"pam_*"
]
},
{
"package": "pcre",
"headers": [
"pcre.h",
"pcreposix.h"
],
"libraries": [
"libpcre.*",
"libpcre2*",
"libpcreposix.*"
]
},
{
"package": "php",
"headers": [
"php"
],
"libraries": [
"php"
]
},
{
"package": "postgresql",
"libraries": [
"libecpg*",
"libpg*",
"libpq*"
]
},
{
"package": "python",
"headers": [
"python*"
],
"frameworks": [
"Python.framework"
],
"libraries": [
"libpython*",
"python*"
]
},
{
"package": "readline",
"headers": [
"readline"
],
"libraries": [
"libhistory.*",
"libreadline.*"
]
},
{
"package": "ruby",
"frameworks": [
"Ruby.framework"
],
"libraries": [
"libruby.*",
"ruby"
]
},
{
"package": "sqlite3",
"headers": [
"sqlite3.h",
"sqlite3ext.h"
],
"libraries": [
"libsqlite3.*"
]
},
{
"package": "swift",
"libraries": [
"swift/shims"
]
},
{
"package": "tcl",
"headers": [
"tcl*",
"tk*"
],
"frameworks": [
"Tcl.framework",
"Tk.framework"
],
"libraries": [
"libtcl*",
"libtk*",
"tclConfig.sh",
"tkConfig.sh"
]
},
{
"package": "xar",
"headers": [
"xar"
],
"libraries": [
"libxar.*"
]
},
{
"package": "xz",
"headers": [
"lzma*"
],
"libraries": [
"liblzma.*"
]
},
{
"package": "zlib",
"headers": [
"zconf.h",
"zlib.h"
],
"libraries": [
"libz.*"
]
}
]

View File

@@ -1,26 +0,0 @@
{
"14": {
"urls": [
"https://swcdn.apple.com/content/downloads/14/48/052-59890-A_I0F5YGAY0Y/p9n40hio7892gou31o1v031ng6fnm9sb3c/CLTools_macOSNMOS_SDK.pkg",
"https://web.archive.org/web/20250211001355/https://swcdn.apple.com/content/downloads/14/48/052-59890-A_I0F5YGAY0Y/p9n40hio7892gou31o1v031ng6fnm9sb3c/CLTools_macOSNMOS_SDK.pkg"
],
"version": "14.4",
"hash": "sha256-QozDiwY0Czc0g45vPD7G4v4Ra+3DujCJbSads3fJjjM="
},
"15": {
"urls": [
"https://swcdn.apple.com/content/downloads/52/01/082-41241-A_0747ZN8FHV/dectd075r63pppkkzsb75qk61s0lfee22j/CLTools_macOSNMOS_SDK.pkg",
"https://web.archive.org/web/20250530132510/https://swcdn.apple.com/content/downloads/52/01/082-41241-A_0747ZN8FHV/dectd075r63pppkkzsb75qk61s0lfee22j/CLTools_macOSNMOS_SDK.pkg"
],
"version": "15.5",
"hash": "sha256-HBiSJuw1XBUK5R/8Sj65c3rftSEvQl/O9ZZVp/g1Amo="
},
"26": {
"urls": [
"https://swcdn.apple.com/content/downloads/60/22/089-71960-A_W8BL1RUJJ6/5zkyplomhk1cm7z6xja2ktgapnhhti6wwd/CLTools_macOSNMOS_SDK.pkg",
"https://web.archive.org/web/20250915230423/https://swcdn.apple.com/content/downloads/60/22/089-71960-A_W8BL1RUJJ6/5zkyplomhk1cm7z6xja2ktgapnhhti6wwd/CLTools_macOSNMOS_SDK.pkg"
],
"version": "26.2",
"hash": "sha256-hXRlMieVv0smna5uiWRwq87IWOaPWtAjAldbi+wQXcw="
}
}

View File

@@ -1,110 +0,0 @@
# Top-level apple-sdk derivation: fetches the Apple Command Line Tools SDK
# package for the requested major version and assembles it by composing the
# phase overlays from ./common (stub processing, propagation, hooks, etc.).
let
# Maps SDK major version (string key) to { version, urls, hash } metadata.
sdkVersions = builtins.fromJSON (builtins.readFile ./metadata/versions.json);
in
{ lib
, stdenv
, stdenvNoCC
, substitute
, # Specifies the major version used for the SDK. Uses `hostPlatform.darwinSdkVersion` by default.
darwinSdkMajorVersion ? lib.versions.major stdenv.hostPlatform.darwinSdkVersion
, # Enabling bootstrap disables propagation. Defaults to `false` (meaning to propagate certain packages and `xcrun`)
# except in stage0 of the Darwin stdenv bootstrap.
enableBootstrap ? stdenv.name == "bootstrap-stage0-stdenv-darwin"
, # Required by various phases
callPackage
,
}:
let
# NOTE(review): `lib.throw` — plain `throw` is the Nix builtin; verify that
# `lib` actually exports `throw`, otherwise the fallback path would fail with
# a missing-attribute error instead of this message.
sdkInfo =
sdkVersions.${darwinSdkMajorVersion}
or (lib.throw "Unsupported SDK major version: ${darwinSdkMajorVersion}");
sdkVersion = sdkInfo.version;
fetchSDK = callPackage ./common/fetch-sdk.nix { };
# All phases are attribute-set extensions (self: super: { ... }) composed in
# order; each may append to buildPhase/postInstall of the previous ones.
phases = lib.composeManyExtensions (
[
(callPackage ./common/add-core-symbolication.nix { })
(callPackage ./common/derivation-options.nix { })
(callPackage ./common/passthru-private-frameworks.nix { inherit sdkVersion; })
(callPackage ./common/passthru-source-release-files.nix { inherit sdkVersion; })
(callPackage ./common/remove-disallowed-packages.nix { })
(callPackage ./common/process-stubs.nix { })
]
# Avoid infinite recursions by not propagating certain packages, so they can themselves build with the SDK.
++ lib.optionals (!enableBootstrap) [
(callPackage ./common/propagate-inputs.nix { })
(callPackage ./common/propagate-xcrun.nix { inherit sdkVersion; })
]
# This has to happen last.
++ [
(callPackage ./common/run-build-phase-hooks.nix { })
]
);
in
stdenvNoCC.mkDerivation (
lib.extends phases (finalAttrs: {
pname = "apple-sdk";
inherit (sdkInfo) version;
src = fetchSDK sdkInfo;
dontConfigure = true;
strictDeps = true;
setupHooks = [
# `role.bash` is copied from `../build-support/setup-hooks/role.bash` due to the requirements not to reference
# paths outside the package when it is in `by-name`. It needs to be kept in sync, but it fortunately does not
# change often. Once `build-support` is available as a package (or some other mechanism), it should be changed
# to whatever that replacement is.
./setup-hooks/role.bash
# Bakes the SDK version (split into components) into the SDK setup hook.
(substitute {
src = ./setup-hooks/sdk-hook.sh;
substitutions = [
"--subst-var-by"
"sdkVersion"
(lib.escapeShellArgs (lib.splitVersion sdkVersion))
];
})
];
# NOTE: `$sdkpath` and `$platformPath` are expected to be set by one of the
# composed phases/hooks — they are not defined in this expression.
installPhase =
let
sdkName = "MacOSX${lib.versions.majorMinor sdkVersion}.sdk";
sdkMajor = lib.versions.major sdkVersion;
in
''
runHook preInstall
mkdir -p "$sdkpath"
cp -rd . "$sdkpath/${sdkName}"
ln -s "${sdkName}" "$sdkpath/MacOSX${sdkMajor}.sdk"
ln -s "${sdkName}" "$sdkpath/MacOSX.sdk"
# Swift adds these locations to its search paths. Avoid spurious warnings by making sure they exist.
mkdir -p "$platformPath/Developer/Library/Frameworks"
mkdir -p "$platformPath/Developer/Library/PrivateFrameworks"
mkdir -p "$platformPath/Developer/usr/lib"
runHook postInstall
'';
passthru = {
# Convenience path to the versionless SDK root inside the platform tree.
sdkroot = finalAttrs.finalPackage + "/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk";
};
__structuredAttrs = true;
meta = {
description = "Frameworks and libraries required for building packages on Darwin";
homepage = "https://developer.apple.com";
teams = [ lib.teams.darwin ];
platforms = lib.platforms.darwin;
badPlatforms = [ lib.systems.inspect.patterns.is32bit ];
};
})
)

View File

@@ -1,48 +0,0 @@
From 6531da946949a94643e6d8424236174ae64fe0ca Mon Sep 17 00:00:00 2001
From: Randy Eckenrode <randy@largeandhighquality.com>
Date: Sat, 30 Sep 2023 18:02:39 -0400
Subject: [PATCH 1/2] Add function definitions needed to build zlog in
system_cmds
---
CoreSymbolication.h | 10 +++++++---
1 file changed, 7 insertions(+), 3 deletions(-)
diff --git a/CoreSymbolication.h b/CoreSymbolication.h
index a413860..f3cf63f 100644
--- a/CoreSymbolication.h
+++ b/CoreSymbolication.h
@@ -324,7 +324,9 @@ CSSymbolOwnerEditRelocations
CSSymbolOwnerForeachRegion
CSSymbolOwnerForeachRegionWithName
CSSymbolOwnerForeachSection
-CSSymbolOwnerForeachSegment
+*/
+void CSSymbolOwnerForeachSegment(CSSymbolOwnerRef owner, void (^block)(CSSegmentRef));
+/*
CSSymbolOwnerForeachSourceInfo
CSSymbolOwnerForeachSymbol
*/
@@ -333,7 +335,9 @@ void CSSymbolOwnerForeachSymbolWithName(CSSymbolOwnerRef owner, const char *sna
/*
CSSymbolOwnerGetArchitecture
CSSymbolOwnerGetBaseAddress
-CSSymbolOwnerGetCFUUIDBytes
+*/
+const CFUUIDBytes* CSSymbolOwnerGetCFUUIDBytes(CSSymbolOwnerRef owner);
+/*
CSSymbolOwnerGetCompatibilityVersion
CSSymbolOwnerGetCurrentVersion
CSSymbolOwnerGetDataFlags
@@ -390,7 +394,7 @@ CSSymbolOwnerSetLoadTimestamp
CSSymbolOwnerSetPath
CSSymbolOwnerSetRelocationCount
*/
-CSSymbolOwnerSetTransientUserData(CSSymbolOwnerRef owner, uint32_t gen);
+void CSSymbolOwnerSetTransientUserData(CSSymbolOwnerRef owner, uint32_t gen);
/*
CSSymbolOwnerSetUnloadTimestamp
*/
--
2.44.1

View File

@@ -1,45 +0,0 @@
From ae7ac6a7043dbae8e63d6ce5e63dfaf02b5977fe Mon Sep 17 00:00:00 2001
From: Randy Eckenrode <randy@largeandhighquality.com>
Date: Sat, 30 Sep 2023 18:37:18 -0400
Subject: [PATCH 2/2] Add CF_EXPORT To const symbols
---
CoreSymbolication.h | 15 ++++++++-------
1 file changed, 8 insertions(+), 7 deletions(-)
diff --git a/CoreSymbolication.h b/CoreSymbolication.h
index f3cf63f..4124a54 100644
--- a/CoreSymbolication.h
+++ b/CoreSymbolication.h
@@ -49,6 +49,7 @@
#include <CoreFoundation/CoreFoundation.h>
+#include <CoreFoundation/CFBase.h>
#include <mach/mach.h>
@@ -139,13 +140,13 @@ typedef void (^CSSegmentIterator)(CSSegmentRef segment);
* External symbols
*/
-const char* kCSRegionMachHeaderName;
-const CSDictionaryKeyCallBacks kCSTypeDictionaryKeyCallBacks;
-const CSDictionaryValueCallBacks kCSTypeDictionaryValueCallBacks;
-const CSDictionaryKeyCallBacks kCSTypeDictionaryWeakKeyCallBacks;
-const CSDictionaryValueCallBacks kCSTypeDictionaryWeakValueCallBacks;
-const CSSetCallBacks kCSTypeSetCallBacks;
-const CSSetCallBacks kCSTypeSetWeakCallBacks;
+CF_EXPORT const char* kCSRegionMachHeaderName;
+CF_EXPORT const CSDictionaryKeyCallBacks kCSTypeDictionaryKeyCallBacks;
+CF_EXPORT const CSDictionaryValueCallBacks kCSTypeDictionaryValueCallBacks;
+CF_EXPORT const CSDictionaryKeyCallBacks kCSTypeDictionaryWeakKeyCallBacks;
+CF_EXPORT const CSDictionaryValueCallBacks kCSTypeDictionaryWeakValueCallBacks;
+CF_EXPORT const CSSetCallBacks kCSTypeSetCallBacks;
+CF_EXPORT const CSSetCallBacks kCSTypeSetWeakCallBacks;
/*
--
2.44.1

View File

@@ -1,41 +0,0 @@
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p coreutils curl file gzip jq xcbuild yq

# List the newest Command Line Tools SDK package per Xcode major version
# found in an Apple software update catalog.

set -eu -o pipefail

catalog=${1-}
if [ -z "$catalog" ]; then
    # Fix: keep the whole usage message on stderr (the first line
    # previously leaked to stdout).
    echo "usage: get-sdks-from-catalog.sh <catalog>" >&2
    echo "    <catalog>  Apple software update catalog (may be gzipped)" >&2
    exit 1
fi

scratch=$(mktemp)
trap 'rm -f -- "$scratch"' EXIT

# Decompress the catalog if needed; either way, work on a private copy.
if [[ "$(file "$catalog")" =~ gzip ]]; then
    gzcat "$catalog" >"$scratch"
else
    cp --reflink=auto "$catalog" "$scratch"
fi

# Grab all SDK packages from the catalog
filter='.Products[].Packages[] | select(.URL | test(".*CLTools_macOSNMOS_SDK.pkg")) | "\(.URL)|\(.MetadataURL)"'
declare -A package_list
for package in $(plutil -convert json -o - "$scratch" | jq -r "$filter"); do
    package_list[${package%%|*}]=${package#*|}
done

# Reuse the scratch file to accumulate one JSON object per package.
truncate --size 0 "$scratch"
for pkg in "${!package_list[@]}"; do
    ver=$(curl --silent "${package_list[$pkg]}" | xq -r '."pkg-info"."@version"')
    echo "{\"url\": \"$pkg\", \"version\": \"$(cut -d. -f1-3 <<<"$ver")\", \"long_version\": \"$ver\"}" >>"$scratch"
done

# Keep only the highest version per major release, sorted ascending.
jq -r --slurp '
group_by(.version | split(".")[0])
| map(max_by(.version))
| sort_by(.version)[]
| "Package URL: \(.url)\n  Xcode Ver: \(.version) (\(.long_version))\n"' "$scratch"

View File

@@ -1,70 +0,0 @@
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p coreutils curl git gnutar jq moreutils nix

# Record the version and source hash of each requested Apple open-source
# package for one SDK release into metadata/apple-oss-lockfile.json.

set -eu -o pipefail

if [ ! -v 2 ]; then
    echo "usage: lock-sdk-deps.sh <SDK version> <Packages>" >&2
    echo "    <SDK version>  Decimal-separated version number." >&2
    echo "                   Must correspond to a tag in https://github.com/apple-oss-distributions/distribution-macOS" >&2
    echo "    <Packages>     List of packages from the distributions-macOS repository." >&2
    # Fix: this line previously went to stdout while the rest of the usage
    # text went to stderr.
    echo "                   Packages not in the repository at the tag for <SDK version> will be ignored." >&2
    exit 1
fi

pkgdir=$(dirname "$(dirname "$(realpath "$0")")")
lockfile=$pkgdir/metadata/apple-oss-lockfile.json
if [ ! -e "$lockfile" ]; then
    # Fix: initialize with an empty object instead of `touch`. jq produces
    # no output at all for an empty input file, so the `jq ... | sponge`
    # merge below would silently write an empty lockfile forever.
    echo '{}' >"$lockfile"
fi

workdir=$(mktemp -d)
trap 'rm -rf -- "$workdir"' EXIT

sdkVersion=$1
shift
# distribution-macOS tags drop the dots, e.g. 14.4 -> macos-144.
tag="macos-${sdkVersion//./}"
declare -a packages=("$@")

echo "Locking versions for macOS $sdkVersion using tag '$tag'..."
pushd "$workdir" >/dev/null
git clone --branch "$tag" https://github.com/apple-oss-distributions/distribution-macOS.git &>/dev/null
cd distribution-macOS
for package in "${packages[@]}"; do
    # If the tag exists in `release.json`, use that as an optimization to avoid downloading unnecessarily from Github.
    packageTag=$(jq -r --arg package "$package" '.projects[] | select(.project == $package) | .tag' release.json)
    packageCommit=$(git ls-tree -d HEAD "$package" | awk '{print $3}')
    if [ ! -d "$package" ]; then
        packageCommit=HEAD
    fi
    # However, sometimes it doesn't exist. In that case, fall back to cloning the repo and check manually
    # which tag corresponds to the commit from the submodule.
    if [ -z "$packageTag" ]; then
        git clone --no-checkout "https://github.com/apple-oss-distributions/$package.git" ../source &>/dev/null
        pushd ../source >/dev/null
        packageTag=$(git tag --points-at "$packageCommit")
        popd >/dev/null
        rm -rf ../source
    fi
    packageVersion=${packageTag##"$package"-}
    curl -OL "https://github.com/apple-oss-distributions/$package/archive/$packageTag.tar.gz" &>/dev/null
    tar axf "$packageTag.tar.gz"
    packageHash=$(nix --extra-experimental-features nix-command hash path "$package-$packageTag")
    pkgsjson="{\"$sdkVersion\": {\"$package\": {\"version\": \"$packageVersion\", \"hash\": \"$packageHash\"}}}"
    echo " - Locking $package to version $packageVersion with hash '$packageHash'"
    # Deep-merge the new entry into the lockfile in place.
    jq --argjson pkg "$pkgsjson" -S '. * $pkg' "$lockfile" | sponge "$lockfile"
done
popd >/dev/null

View File

@@ -1,62 +0,0 @@
#!/usr/bin/env nix-shell
#!nix-shell -i bash -p coreutils jq

# Regenerate metadata/apple-oss-lockfile.json from scratch for every SDK
# version listed in metadata/versions.json, by invoking lock-sdk-deps.sh
# once per version with the full package list below.

set -eu -o pipefail

pkgdir=$(dirname "$(dirname "$(realpath "$0")")")

# Start from an empty JSON object so stale entries are dropped.
echo '{}' >"$pkgdir/metadata/apple-oss-lockfile.json"

declare -a versions
readarray -t versions < <(jq -r '.[].version' "$pkgdir/metadata/versions.json")

# Apple open-source projects whose versions are pinned per SDK release.
declare -a packages=(
    CarbonHeaders
    CommonCrypto
    IOAudioFamily
    IOFireWireFamily
    IOFWDVComponents
    IOFireWireAVC
    IOFireWireSBP2
    IOFireWireSerialBusProtocolTransport
    IOGraphics
    IOHIDFamily
    IONetworkingFamily
    IOSerialFamily
    IOStorageFamily
    IOBDStorageFamily
    IOCDStorageFamily
    IODVDStorageFamily
    IOUSBFamily
    IOKitUser
    Libc
    Libinfo
    Libm
    Libnotify
    Librpcsvc
    Libsystem
    OpenDirectory
    Security
    architecture
    configd
    copyfile
    dtrace
    dyld
    eap8021x
    hfs
    launchd
    libclosure
    libdispatch
    libmalloc
    libplatform
    libpthread
    mDNSResponder
    objc4
    ppp
    removefile
    xnu
)

for version in "${versions[@]}"; do
    "$pkgdir/scripts/lock-sdk-deps.sh" "$version" "${packages[@]}"
done

View File

@@ -1,6 +0,0 @@
# Pre-configure hook: add the macOS SDK's PrivateFrameworks directory to the
# compiler (-iframework) and linker (-F) search paths so private frameworks
# can be used during the build.
function enablePrivateFrameworks() {
    export NIX_CFLAGS_COMPILE+=" -iframework $DEVELOPER_DIR/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/PrivateFrameworks"
    export NIX_LDFLAGS+=" -F$DEVELOPER_DIR/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/System/Library/PrivateFrameworks"
}

preConfigureHooks+=(enablePrivateFrameworks)

View File

@@ -1,71 +0,0 @@
# Since the same derivation can be depended on in multiple ways, we need to
# accumulate *each* role (i.e. host and target platforms relative to the
# depending derivation) in which the derivation is used.
#
# The role is intended to be used as part of other variable names like
# - $NIX_SOMETHING${role_post}

# Map a platform offset (-1 build, 0 host, 1 target) to the variable-name
# suffix in `role_post`; fails on any other offset.
function getRole() {
    case $1 in
        -1)
            role_post='_FOR_BUILD'
            ;;
        0)
            role_post=''
            ;;
        1)
            role_post='_FOR_TARGET'
            ;;
        *)
            echo "@name@: used as improper sort of dependency" >&2
            return 1
            ;;
    esac
}

# `hostOffset` describes how the host platform of the package is slid relative
# to the depending package. `targetOffset` likewise describes the target
# platform of the package. Both are brought into scope of the setup hook defined
# for dependency whose setup hook is being processed relative to the package
# being built.

function getHostRole() {
    getRole "$hostOffset"
}

function getTargetRole() {
    getRole "$targetOffset"
}

# `depHostOffset` describes how the host platform of the dependencies are slid
# relative to the depending package. `depTargetOffset` likewise describes the
# target platform of dependencies. Both are brought into scope of the
# environment hook defined for the dependency being applied relative to the
# package being built.

function getHostRoleEnvHook() {
    getRole "$depHostOffset"
}

function getTargetRoleEnvHook() {
    getRole "$depTargetOffset"
}

# This variant is intended specifically for code-producing tool wrapper scripts
# `NIX_@wrapperName@_TARGET_*_@suffixSalt@` tracks this (needs to be an exported
# env var so can't use fancier data structures).
function getTargetRoleWrapper() {
    case $targetOffset in
        -1)
            export NIX_@wrapperName@_TARGET_BUILD_@suffixSalt@=1
            ;;
        0)
            export NIX_@wrapperName@_TARGET_HOST_@suffixSalt@=1
            ;;
        1)
            export NIX_@wrapperName@_TARGET_TARGET_@suffixSalt@=1
            ;;
        *)
            echo "@name@: used as improper sort of dependency" >&2
            return 1
            ;;
    esac
}

View File

@@ -1,17 +0,0 @@
# Setup-hook fragment: select this Apple SDK for the current role, but only
# if it is newer than any SDK already hooked for that role.
local role_post
getHostRole

local sdkVersionVar=NIX_APPLE_SDK_VERSION${role_post}
local developerDirVar=DEVELOPER_DIR${role_post}

local sdkVersionArr=(@sdkVersion@)
local sdkVersion
# Zero-pad each version component so versions compare correctly as plain
# integers (e.g. 11.3 -> 110300).
sdkVersion=$(printf "%02d%02d%02d" "${sdkVersionArr[0]-0}" "${sdkVersionArr[1]-0}" "${sdkVersionArr[2]-0}")

if [ "$sdkVersion" -gt "${!sdkVersionVar-000000}" ]; then
    export "$developerDirVar"='@out@'
    export "$sdkVersionVar"="$sdkVersion"
    export "SDKROOT${role_post}"="${!developerDirVar}/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk"
fi

# Don't leak helper locals into the calling environment.
unset -v role_post developerDirVar sdkVersion sdkVersionArr sdkVersionVar

View File

@@ -86,7 +86,6 @@ let
(lib.cmakeOptionType "filepath" "FETCHCONTENT_SOURCE_DIR_NANOBIND" "${nanobind}")
(lib.cmakeBool "FETCHCONTENT_FULLY_DISCONNECTED" true)
(lib.cmakeBool "MLX_BUILD_METAL" true)
(lib.cmakeBool "MLX_BUILD_CPU" true)
(lib.cmakeOptionType "filepath" "FETCHCONTENT_SOURCE_DIR_METAL_CPP" "${metal_cpp}")
(lib.cmakeOptionType "string" "CMAKE_OSX_DEPLOYMENT_TARGET" "${apple-sdk_26.version}")
(lib.cmakeOptionType "filepath" "CMAKE_OSX_SYSROOT" "${apple-sdk_26.passthru.sdkroot}")

View File

@@ -26,7 +26,7 @@ dependencies = [
"httpx>=0.28.1",
"tomlkit>=0.14.0",
"pillow>=11.0,<12.0", # compatibility with mflux
"mflux==0.15.4",
"mflux==0.15.5",
"python-multipart>=0.0.21",
]

View File

@@ -0,0 +1,45 @@
# Model card: 4-bit quantized FLUX.1-Kontext-dev (image-editing model).
model_id = "exolabs/FLUX.1-Kontext-dev-4bit"
# 57 shardable transformer layers (19 joint + 38 single blocks).
n_layers = 57
hidden_size = 1
supports_tensor = false
tasks = ["ImageToImage"]

[storage_size]
in_bytes = 15475325472

[[components]]
component_name = "text_encoder"
component_path = "text_encoder/"
n_layers = 12
can_shard = false

[components.storage_size]
in_bytes = 0

[[components]]
component_name = "text_encoder_2"
component_path = "text_encoder_2/"
n_layers = 24
can_shard = false
safetensors_index_filename = "model.safetensors.index.json"

[components.storage_size]
in_bytes = 9524621312

[[components]]
component_name = "transformer"
component_path = "transformer/"
n_layers = 57
can_shard = true
safetensors_index_filename = "diffusion_pytorch_model.safetensors.index.json"

[components.storage_size]
in_bytes = 5950704160

[[components]]
component_name = "vae"
component_path = "vae/"
can_shard = false

[components.storage_size]
in_bytes = 0

View File

@@ -0,0 +1,45 @@
# Model card: 8-bit quantized FLUX.1-Kontext-dev (image-editing model).
model_id = "exolabs/FLUX.1-Kontext-dev-8bit"
# 57 shardable transformer layers (19 joint + 38 single blocks).
n_layers = 57
hidden_size = 1
supports_tensor = false
tasks = ["ImageToImage"]

[storage_size]
in_bytes = 21426029632

[[components]]
component_name = "text_encoder"
component_path = "text_encoder/"
n_layers = 12
can_shard = false

[components.storage_size]
in_bytes = 0

[[components]]
component_name = "text_encoder_2"
component_path = "text_encoder_2/"
n_layers = 24
can_shard = false
safetensors_index_filename = "model.safetensors.index.json"

[components.storage_size]
in_bytes = 9524621312

[[components]]
component_name = "transformer"
component_path = "transformer/"
n_layers = 57
can_shard = true
safetensors_index_filename = "diffusion_pytorch_model.safetensors.index.json"

[components.storage_size]
in_bytes = 11901408320

[[components]]
component_name = "vae"
component_path = "vae/"
can_shard = false

[components.storage_size]
in_bytes = 0

View File

@@ -0,0 +1,45 @@
# Model card: full-precision FLUX.1-Kontext-dev (image-editing model).
model_id = "exolabs/FLUX.1-Kontext-dev"
# 57 shardable transformer layers (19 joint + 38 single blocks).
n_layers = 57
hidden_size = 1
supports_tensor = false
tasks = ["ImageToImage"]

[storage_size]
in_bytes = 33327437952

[[components]]
component_name = "text_encoder"
component_path = "text_encoder/"
n_layers = 12
can_shard = false

[components.storage_size]
in_bytes = 0

[[components]]
component_name = "text_encoder_2"
component_path = "text_encoder_2/"
n_layers = 24
can_shard = false
safetensors_index_filename = "model.safetensors.index.json"

[components.storage_size]
in_bytes = 9524621312

[[components]]
component_name = "transformer"
component_path = "transformer/"
n_layers = 57
can_shard = true
safetensors_index_filename = "diffusion_pytorch_model.safetensors.index.json"

[components.storage_size]
in_bytes = 23802816640

[[components]]
component_name = "vae"
component_path = "vae/"
can_shard = false

[components.storage_size]
in_bytes = 0

View File

@@ -3,7 +3,6 @@
import time
from collections.abc import AsyncGenerator
from typing import Any
from uuid import uuid4
from exo.shared.types.api import (
ChatCompletionChoice,
@@ -141,7 +140,7 @@ async def generate_chat_stream(
if isinstance(chunk, ToolCallChunk):
tool_call_deltas = [
ToolCall(
id=str(uuid4()),
id=tool.id,
index=i,
function=tool,
)
@@ -207,7 +206,7 @@ async def collect_chat_response(
if isinstance(chunk, ToolCallChunk):
tool_calls.extend(
ToolCall(
id=str(uuid4()),
id=tool.id,
index=i,
function=tool,
)

View File

@@ -3,7 +3,6 @@
import json
from collections.abc import AsyncGenerator
from typing import Any
from uuid import uuid4
from exo.shared.types.api import FinishReason
from exo.shared.types.chunks import ErrorChunk, TokenChunk, ToolCallChunk
@@ -179,7 +178,7 @@ async def collect_claude_response(
for tool in chunk.tool_calls:
tool_use_blocks.append(
ClaudeToolUseBlock(
id=f"toolu_{uuid4().hex[:24]}",
id=f"toolu_{tool.id}",
name=tool.name,
input=json.loads(tool.arguments), # pyright: ignore[reportAny]
)
@@ -264,7 +263,7 @@ async def generate_claude_stream(
# Emit tool_use content blocks
for tool in chunk.tool_calls:
tool_id = f"toolu_{uuid4().hex[:24]}"
tool_id = f"toolu_{tool.id}"
tool_input_json = tool.arguments
# content_block_start for tool_use

View File

@@ -3,7 +3,6 @@
from collections.abc import AsyncGenerator
from itertools import count
from typing import Any
from uuid import uuid4
from exo.shared.types.chunks import ErrorChunk, TokenChunk, ToolCallChunk
from exo.shared.types.common import CommandId
@@ -140,8 +139,8 @@ async def collect_responses_response(
for tool in chunk.tool_calls:
function_call_items.append(
ResponseFunctionCallItem(
id=f"fc_{uuid4().hex[:24]}",
call_id=f"call_{uuid4().hex[:24]}",
id=f"fc_{tool.id}",
call_id=f"call_{tool.id}",
name=tool.name,
arguments=tool.arguments,
)
@@ -246,8 +245,8 @@ async def generate_responses_stream(
if isinstance(chunk, ToolCallChunk):
last_stats = chunk.stats or last_stats
for tool in chunk.tool_calls:
fc_id = f"fc_{uuid4().hex[:24]}"
call_id = f"call_{uuid4().hex[:24]}"
fc_id = f"fc_{tool.id}"
call_id = f"call_{tool.id}"
# response.output_item.added for function_call
fc_item = ResponseFunctionCallItem(

View File

@@ -1,6 +1,7 @@
import time
from collections.abc import Generator
from typing import Annotated, Any, Literal
from uuid import uuid4
from pydantic import BaseModel, Field, field_validator
from pydantic_core import PydanticUseDefault
@@ -60,6 +61,7 @@ class ChatCompletionMessageText(BaseModel):
class ToolCallItem(BaseModel):
id: str = Field(default_factory=lambda: str(uuid4()))
name: str
arguments: str
@@ -272,6 +274,7 @@ class AdvancedImageParams(BaseModel):
num_inference_steps: Annotated[int, Field(ge=1, le=100)] | None = None
guidance: Annotated[float, Field(ge=1.0, le=20.0)] | None = None
negative_prompt: str | None = None
num_sync_steps: Annotated[int, Field(ge=1, le=100)] | None = None
class ImageGenerationTaskParams(BaseModel):

View File

@@ -1,5 +1,4 @@
from enum import Enum
from math import ceil
from pydantic import BaseModel
@@ -23,7 +22,7 @@ class ImageModelConfig(BaseModel):
block_configs: tuple[TransformerBlockConfig, ...]
default_steps: dict[str, int] # {"low": X, "medium": Y, "high": Z}
num_sync_steps_factor: float # Fraction of steps for sync phase
num_sync_steps: int # Number of sync steps for distributed inference
guidance_scale: float | None = None # None or <= 1.0 disables CFG
@@ -45,6 +44,3 @@ class ImageModelConfig(BaseModel):
def get_steps_for_quality(self, quality: str) -> int:
return self.default_steps[quality]
def get_num_sync_steps(self, steps: int) -> int:
return ceil(steps * self.num_sync_steps_factor)

View File

@@ -150,7 +150,10 @@ class DistributedImageModel:
guidance=guidance_override if guidance_override is not None else 4.0,
)
num_sync_steps = self._config.get_num_sync_steps(steps)
if advanced_params is not None and advanced_params.num_sync_steps is not None:
num_sync_steps = advanced_params.num_sync_steps
else:
num_sync_steps = self._config.num_sync_steps
for result in self._runner.generate_image(
runtime_config=config,

View File

@@ -5,7 +5,9 @@ from exo.worker.engines.image.config import ImageModelConfig
from exo.worker.engines.image.models.base import ModelAdapter
from exo.worker.engines.image.models.flux import (
FLUX_DEV_CONFIG,
FLUX_KONTEXT_CONFIG,
FLUX_SCHNELL_CONFIG,
FluxKontextModelAdapter,
FluxModelAdapter,
)
from exo.worker.engines.image.models.qwen import (
@@ -26,13 +28,16 @@ AdapterFactory = Callable[
# Registry maps model_family string to adapter factory
_ADAPTER_REGISTRY: dict[str, AdapterFactory] = {
"flux": FluxModelAdapter,
"flux-kontext": FluxKontextModelAdapter,
"qwen-edit": QwenEditModelAdapter,
"qwen": QwenModelAdapter,
}
# Config registry: maps model ID patterns to configs
# Order matters: longer/more-specific patterns must come before shorter ones
_CONFIG_REGISTRY: dict[str, ImageModelConfig] = {
"flux.1-schnell": FLUX_SCHNELL_CONFIG,
"flux.1-kontext": FLUX_KONTEXT_CONFIG, # Must come before "flux.1-dev" for pattern matching
"flux.1-krea-dev": FLUX_DEV_CONFIG, # Must come before "flux.1-dev" for pattern matching
"flux.1-dev": FLUX_DEV_CONFIG,
"qwen-image-edit": QWEN_IMAGE_EDIT_CONFIG, # Must come before "qwen-image" for pattern matching

View File

@@ -66,6 +66,19 @@ class PromptData(ABC):
"""
...
@property
@abstractmethod
def kontext_image_ids(self) -> mx.array | None:
"""Kontext-style position IDs for image conditioning.
For FLUX.1-Kontext models, returns position IDs with first_coord=1
to distinguish conditioning tokens from generation tokens (first_coord=0).
Returns:
Position IDs array [1, seq_len, 3] for Kontext, None for other models.
"""
...
@abstractmethod
def get_batched_cfg_data(
self,

View File

@@ -1,11 +1,17 @@
from exo.worker.engines.image.models.flux.adapter import FluxModelAdapter
from exo.worker.engines.image.models.flux.config import (
FLUX_DEV_CONFIG,
FLUX_KONTEXT_CONFIG,
FLUX_SCHNELL_CONFIG,
)
from exo.worker.engines.image.models.flux.kontext_adapter import (
FluxKontextModelAdapter,
)
__all__ = [
"FluxModelAdapter",
"FluxKontextModelAdapter",
"FLUX_DEV_CONFIG",
"FLUX_KONTEXT_CONFIG",
"FLUX_SCHNELL_CONFIG",
]

View File

@@ -59,6 +59,10 @@ class FluxPromptData(PromptData):
def conditioning_latents(self) -> mx.array | None:
return None
@property
def kontext_image_ids(self) -> mx.array | None:
return None
def get_batched_cfg_data(
self,
) -> tuple[mx.array, mx.array, mx.array | None, mx.array | None] | None:

View File

@@ -15,7 +15,7 @@ FLUX_SCHNELL_CONFIG = ImageModelConfig(
),
),
default_steps={"low": 1, "medium": 2, "high": 4},
num_sync_steps_factor=0.5, # 1 sync step for medium (2 steps)
num_sync_steps=1,
)
@@ -30,5 +30,21 @@ FLUX_DEV_CONFIG = ImageModelConfig(
),
),
default_steps={"low": 10, "medium": 25, "high": 50},
num_sync_steps_factor=0.125, # ~3 sync steps for medium (25 steps)
num_sync_steps=4,
)
FLUX_KONTEXT_CONFIG = ImageModelConfig(
model_family="flux-kontext",
block_configs=(
TransformerBlockConfig(
block_type=BlockType.JOINT, count=19, has_separate_text_output=True
),
TransformerBlockConfig(
block_type=BlockType.SINGLE, count=38, has_separate_text_output=False
),
),
default_steps={"low": 10, "medium": 25, "high": 50},
num_sync_steps=4,
guidance_scale=4.0,
)

View File

@@ -0,0 +1,348 @@
import math
from pathlib import Path
from typing import Any, final
import mlx.core as mx
from mflux.models.common.config.config import Config
from mflux.models.common.config.model_config import ModelConfig
from mflux.models.flux.latent_creator.flux_latent_creator import FluxLatentCreator
from mflux.models.flux.model.flux_text_encoder.prompt_encoder import PromptEncoder
from mflux.models.flux.model.flux_transformer.transformer import Transformer
from mflux.models.flux.variants.kontext.flux_kontext import Flux1Kontext
from mflux.models.flux.variants.kontext.kontext_util import KontextUtil
from exo.worker.engines.image.config import ImageModelConfig
from exo.worker.engines.image.models.base import (
ModelAdapter,
PromptData,
RotaryEmbeddings,
)
from exo.worker.engines.image.models.flux.wrappers import (
FluxJointBlockWrapper,
FluxSingleBlockWrapper,
)
from exo.worker.engines.image.pipeline.block_wrapper import (
JointBlockWrapper,
SingleBlockWrapper,
)
@final
class FluxKontextPromptData(PromptData):
    """Prompt data for FLUX.1-Kontext image editing.

    Stores text embeddings along with conditioning latents and position IDs
    for the input image. Kontext does not use classifier-free guidance, so
    all negative-branch accessors return None.
    """

    def __init__(
        self,
        prompt_embeds: mx.array,
        pooled_prompt_embeds: mx.array,
        conditioning_latents: mx.array,
        kontext_image_ids: mx.array,
    ):
        self._prompt_embeds = prompt_embeds
        self._pooled_prompt_embeds = pooled_prompt_embeds
        self._conditioning_latents = conditioning_latents
        self._kontext_image_ids = kontext_image_ids

    @property
    def prompt_embeds(self) -> mx.array:
        # Text-encoder embeddings for the (single, positive) prompt.
        return self._prompt_embeds

    @property
    def pooled_prompt_embeds(self) -> mx.array:
        # Pooled embedding consumed by the time/text conditioning path.
        return self._pooled_prompt_embeds

    @property
    def negative_prompt_embeds(self) -> mx.array | None:
        # No CFG, hence no negative prompt branch.
        return None

    @property
    def negative_pooled_prompt_embeds(self) -> mx.array | None:
        return None

    def get_encoder_hidden_states_mask(self, positive: bool = True) -> mx.array | None:
        # Flux-style text conditioning uses no attention mask.
        return None

    @property
    def cond_image_grid(
        self,
    ) -> tuple[int, int, int] | list[tuple[int, int, int]] | None:
        # Qwen-edit style conditioning grids are not used by Kontext.
        return None

    @property
    def conditioning_latents(self) -> mx.array | None:
        """VAE-encoded input image latents for Kontext conditioning."""
        return self._conditioning_latents

    @property
    def kontext_image_ids(self) -> mx.array | None:
        """Position IDs for Kontext conditioning (first_coord=1)."""
        return self._kontext_image_ids

    def get_cfg_branch_data(
        self, positive: bool
    ) -> tuple[mx.array, mx.array | None, mx.array | None, mx.array | None]:
        """Kontext doesn't use CFG, but we return positive data for compatibility."""
        # `positive` is intentionally ignored: both branches get the same data.
        return (
            self._prompt_embeds,
            None,
            self._pooled_prompt_embeds,
            self._conditioning_latents,
        )

    def get_batched_cfg_data(
        self,
    ) -> tuple[mx.array, mx.array, mx.array | None, mx.array | None] | None:
        # Kontext doesn't use CFG
        return None
@final
class FluxKontextModelAdapter(ModelAdapter[Flux1Kontext, Transformer]):
    """Adapter for FLUX.1-Kontext image editing model.

    Key differences from standard FluxModelAdapter:
    - Takes an input image and computes output dimensions from it
    - Creates conditioning latents from the input image via VAE
    - Creates special position IDs (kontext_image_ids) for conditioning tokens
    - Creates pure noise latents (not img2img blending)

    Usage order matters: set_image_dimensions() must be called before
    encode_prompt().
    """

    def __init__(
        self,
        config: ImageModelConfig,
        model_id: str,
        local_path: Path,
        quantize: int | None = None,
    ):
        """Load the Flux1Kontext model from local weights.

        Args:
            config: Image model configuration (block layout, steps, guidance).
            model_id: Name resolved by mflux's ModelConfig.
            local_path: Directory containing the model weights.
            quantize: Optional quantization bit width, or None for full precision.
        """
        self._config = config
        self._model = Flux1Kontext(
            model_config=ModelConfig.from_name(model_name=model_id, base_model=None),
            model_path=str(local_path),
            quantize=quantize,
        )
        # NOTE(review): encode_prompt() accesses `self.model` — presumably a
        # base-class property exposing `self._model`; confirm in ModelAdapter.
        self._transformer = self._model.transformer
        # Stores image path and computed dimensions after set_image_dimensions
        self._image_path: str | None = None
        self._output_height: int | None = None
        self._output_width: int | None = None

    @property
    def hidden_dim(self) -> int:
        # Output width of the x_embedder projection is the transformer's
        # hidden dimension.
        return self._transformer.x_embedder.weight.shape[0]  # pyright: ignore[reportUnknownMemberType, reportUnknownVariableType]

    @property
    def needs_cfg(self) -> bool:
        # Kontext does not use classifier-free guidance.
        return False

    def _get_latent_creator(self) -> type:
        # Kontext reuses the standard Flux latent creator.
        return FluxLatentCreator

    def get_joint_block_wrappers(
        self,
        text_seq_len: int,
        encoder_hidden_states_mask: mx.array | None = None,
    ) -> list[JointBlockWrapper[Any]]:
        """Create wrapped joint blocks for Flux Kontext.

        `encoder_hidden_states_mask` is accepted for interface compatibility
        but unused here.
        """
        return [
            FluxJointBlockWrapper(block, text_seq_len)
            for block in self._transformer.transformer_blocks
        ]

    def get_single_block_wrappers(
        self,
        text_seq_len: int,
    ) -> list[SingleBlockWrapper[Any]]:
        """Create wrapped single blocks for Flux Kontext."""
        return [
            FluxSingleBlockWrapper(block, text_seq_len)
            for block in self._transformer.single_transformer_blocks
        ]

    def slice_transformer_blocks(
        self,
        start_layer: int,
        end_layer: int,
    ):
        """Keep only the transformer blocks in [start_layer, end_layer).

        Layers are numbered with all joint blocks first, then single blocks;
        the assigned range may fall entirely within one kind or straddle the
        joint/single boundary.
        """
        all_joint = list(self._transformer.transformer_blocks)
        all_single = list(self._transformer.single_transformer_blocks)
        total_joint_blocks = len(all_joint)

        if end_layer <= total_joint_blocks:
            # All assigned are joint blocks
            joint_start, joint_end = start_layer, end_layer
            single_start, single_end = 0, 0
        elif start_layer >= total_joint_blocks:
            # All assigned are single blocks
            joint_start, joint_end = 0, 0
            single_start = start_layer - total_joint_blocks
            single_end = end_layer - total_joint_blocks
        else:
            # Spans both joint and single
            joint_start, joint_end = start_layer, total_joint_blocks
            single_start = 0
            single_end = end_layer - total_joint_blocks

        self._transformer.transformer_blocks = all_joint[joint_start:joint_end]
        self._transformer.single_transformer_blocks = all_single[
            single_start:single_end
        ]

    def set_image_dimensions(self, image_path: Path) -> tuple[int, int]:
        """Compute and store dimensions from input image.

        Also stores image_path for use in encode_prompt().

        Args:
            image_path: Path to the input image

        Returns:
            (output_width, output_height) for runtime config
        """
        # Local import keeps PIL/mflux image utilities off the module import path.
        from mflux.utils.image_util import ImageUtil

        pil_image = ImageUtil.load_image(str(image_path)).convert("RGB")
        image_size = pil_image.size

        # Compute output dimensions from input image aspect ratio
        # Target area of 1024x1024 = ~1M pixels
        target_area = 1024 * 1024
        ratio = image_size[0] / image_size[1]
        output_width = math.sqrt(target_area * ratio)
        output_height = output_width / ratio
        # Snap to a multiple of 32 first...
        output_width = round(output_width / 32) * 32
        output_height = round(output_height / 32) * 32

        # Ensure multiple of 16 for VAE
        vae_scale_factor = 8
        multiple_of = vae_scale_factor * 2
        output_width = output_width // multiple_of * multiple_of
        output_height = output_height // multiple_of * multiple_of

        self._image_path = str(image_path)
        self._output_width = int(output_width)
        self._output_height = int(output_height)
        return self._output_width, self._output_height

    def create_latents(self, seed: int, runtime_config: Config) -> mx.array:
        """Create initial noise latents for Kontext.

        Unlike standard img2img which blends noise with encoded input,
        Kontext uses pure noise latents. The input image is provided
        separately as conditioning.
        """
        return FluxLatentCreator.create_noise(
            seed=seed,
            height=runtime_config.height,
            width=runtime_config.width,
        )

    def encode_prompt(
        self, prompt: str, negative_prompt: str | None = None
    ) -> FluxKontextPromptData:
        """Encode prompt and create conditioning from stored input image.

        Must call set_image_dimensions() before this method.

        Args:
            prompt: Text prompt for editing
            negative_prompt: Ignored (Kontext doesn't use CFG)

        Returns:
            FluxKontextPromptData with text embeddings and image conditioning

        Raises:
            RuntimeError: If set_image_dimensions() has not been called yet.
        """
        del negative_prompt  # Kontext doesn't support negative prompts or CFG

        if (
            self._image_path is None
            or self._output_height is None
            or self._output_width is None
        ):
            raise RuntimeError(
                "set_image_dimensions() must be called before encode_prompt() "
                "for FluxKontextModelAdapter"
            )

        assert isinstance(self.model.prompt_cache, dict)
        assert isinstance(self.model.tokenizers, dict)

        # Encode text prompt
        prompt_embeds, pooled_prompt_embeds = PromptEncoder.encode_prompt(
            prompt=prompt,
            prompt_cache=self.model.prompt_cache,
            t5_tokenizer=self.model.tokenizers["t5"],  # pyright: ignore[reportAny]
            clip_tokenizer=self.model.tokenizers["clip"],  # pyright: ignore[reportAny]
            t5_text_encoder=self.model.t5_text_encoder,
            clip_text_encoder=self.model.clip_text_encoder,
        )

        # Create conditioning latents from input image
        conditioning_latents, kontext_image_ids = (
            KontextUtil.create_image_conditioning_latents(
                vae=self.model.vae,
                height=self._output_height,
                width=self._output_width,
                image_path=self._image_path,
            )
        )

        return FluxKontextPromptData(
            prompt_embeds=prompt_embeds,
            pooled_prompt_embeds=pooled_prompt_embeds,
            conditioning_latents=conditioning_latents,
            kontext_image_ids=kontext_image_ids,
        )

    def compute_embeddings(
        self,
        hidden_states: mx.array,
        prompt_embeds: mx.array,
    ) -> tuple[mx.array, mx.array]:
        """Project image latents and text embeddings into the transformer's
        hidden space via x_embedder / context_embedder."""
        embedded_hidden = self._transformer.x_embedder(hidden_states)
        embedded_encoder = self._transformer.context_embedder(prompt_embeds)
        return embedded_hidden, embedded_encoder

    def compute_text_embeddings(
        self,
        t: int,
        runtime_config: Config,
        pooled_prompt_embeds: mx.array | None = None,
        hidden_states: mx.array | None = None,
    ) -> mx.array:
        """Compute the timestep/text conditioning vector.

        `hidden_states` is accepted for interface compatibility but unused;
        `pooled_prompt_embeds` is required.
        """
        if pooled_prompt_embeds is None:
            raise ValueError(
                "pooled_prompt_embeds is required for Flux Kontext text embeddings"
            )
        return Transformer.compute_text_embeddings(
            t, pooled_prompt_embeds, self._transformer.time_text_embed, runtime_config
        )

    def compute_rotary_embeddings(
        self,
        prompt_embeds: mx.array,
        runtime_config: Config,
        encoder_hidden_states_mask: mx.array | None = None,
        cond_image_grid: tuple[int, int, int]
        | list[tuple[int, int, int]]
        | None = None,
        kontext_image_ids: mx.array | None = None,
    ) -> RotaryEmbeddings:
        """Compute rotary position embeddings, forwarding the Kontext
        conditioning position IDs.

        `encoder_hidden_states_mask` and `cond_image_grid` are accepted for
        interface compatibility but unused here.
        """
        return Transformer.compute_rotary_embeddings(
            prompt_embeds,
            self._transformer.pos_embed,
            runtime_config,
            kontext_image_ids,
        )

    def apply_guidance(
        self,
        noise_positive: mx.array,
        noise_negative: mx.array,
        guidance_scale: float,
    ) -> mx.array:
        """Never called for Kontext (needs_cfg is False)."""
        raise NotImplementedError("Flux Kontext does not use classifier-free guidance")

View File

@@ -69,6 +69,10 @@ class QwenPromptData(PromptData):
def conditioning_latents(self) -> mx.array | None:
return None
@property
def kontext_image_ids(self) -> mx.array | None:
return None
def get_batched_cfg_data(
self,
) -> tuple[mx.array, mx.array, mx.array | None, mx.array | None] | None:

View File

@@ -12,7 +12,7 @@ QWEN_IMAGE_CONFIG = ImageModelConfig(
),
),
default_steps={"low": 10, "medium": 25, "high": 50},
num_sync_steps_factor=0.25,
num_sync_steps=7,
guidance_scale=3.5, # Set to None or < 1.0 to disable CFG
)
@@ -24,6 +24,6 @@ QWEN_IMAGE_EDIT_CONFIG = ImageModelConfig(
),
),
default_steps={"low": 10, "medium": 25, "high": 50},
num_sync_steps_factor=0.25,
num_sync_steps=7,
guidance_scale=3.5,
)

View File

@@ -85,6 +85,10 @@ class QwenEditPromptData(PromptData):
def qwen_image_ids(self) -> mx.array:
return self._qwen_image_ids
@property
def kontext_image_ids(self) -> mx.array | None:
return None
@property
def is_edit_mode(self) -> bool:
return True

View File

@@ -567,6 +567,7 @@ class DiffusionRunner:
| list[tuple[int, int, int]]
| None = None,
conditioning_latents: mx.array | None = None,
kontext_image_ids: mx.array | None = None,
) -> mx.array:
"""Run a single forward pass through the transformer.
Args:
@@ -578,6 +579,7 @@ class DiffusionRunner:
encoder_hidden_states_mask: Attention mask for text (Qwen)
cond_image_grid: Conditioning image grid dimensions (Qwen edit)
conditioning_latents: Conditioning latents for edit mode
kontext_image_ids: Position IDs for Kontext conditioning (Flux Kontext)
Returns:
Noise prediction tensor
@@ -610,6 +612,7 @@ class DiffusionRunner:
config,
encoder_hidden_states_mask=encoder_hidden_states_mask,
cond_image_grid=cond_image_grid,
kontext_image_ids=kontext_image_ids,
)
assert self.joint_block_wrappers is not None
@@ -681,6 +684,7 @@ class DiffusionRunner:
prompt_data: PromptData,
) -> mx.array:
cond_image_grid = prompt_data.cond_image_grid
kontext_image_ids = prompt_data.kontext_image_ids
results: list[tuple[bool, mx.array]] = []
for branch in self._get_cfg_branches(prompt_data):
@@ -700,6 +704,7 @@ class DiffusionRunner:
encoder_hidden_states_mask=branch.mask,
cond_image_grid=cond_image_grid,
conditioning_latents=branch.cond_latents,
kontext_image_ids=kontext_image_ids,
)
results.append((branch.positive, noise))
@@ -902,10 +907,10 @@ class DiffusionRunner:
config: Config,
hidden_states: mx.array,
prompt_data: PromptData,
kontext_image_ids: mx.array | None = None,
) -> mx.array:
prev_latents = hidden_states
cond_image_grid = prompt_data.cond_image_grid
kontext_image_ids = prompt_data.kontext_image_ids
scaled_hidden_states = config.scheduler.scale_model_input(hidden_states, t) # pyright: ignore[reportAny]
original_latent_tokens: int = scaled_hidden_states.shape[1] # pyright: ignore[reportAny]
@@ -979,10 +984,10 @@ class DiffusionRunner:
latents: mx.array,
prompt_data: PromptData,
is_first_async_step: bool,
kontext_image_ids: mx.array | None = None,
) -> mx.array:
patch_latents, token_indices = self._create_patches(latents, config)
cond_image_grid = prompt_data.cond_image_grid
kontext_image_ids = prompt_data.kontext_image_ids
prev_patch_latents = [p for p in patch_latents]

View File

@@ -386,7 +386,15 @@ def tensor_auto_parallel(
all_to_sharded_linear_in_place,
sharded_to_all_linear_in_place,
)
elif isinstance(model, (Qwen3MoeModel, Glm4MoeModel, Qwen3NextModel)):
elif isinstance(model, Glm4MoeModel):
tensor_parallel_sharding_strategy = Glm4MoeShardingStrategy(
group,
all_to_sharded_linear,
sharded_to_all_linear,
all_to_sharded_linear_in_place,
sharded_to_all_linear_in_place,
)
elif isinstance(model, (Qwen3MoeModel, Qwen3NextModel)):
tensor_parallel_sharding_strategy = QwenShardingStrategy(
group,
all_to_sharded_linear,
@@ -512,9 +520,6 @@ class DeepSeekShardingStrategy(TensorParallelShardingStrategy):
layer.self_attn.q_b_proj
)
# layer.self_attn.kv_b_proj = self.all_to_sharded_linear(
# layer.self_attn.kv_b_proj
# )
layer.self_attn.o_proj = self.sharded_to_all_linear(layer.self_attn.o_proj)
layer.self_attn.num_heads //= self.N
@@ -544,7 +549,7 @@ class DeepSeekShardingStrategy(TensorParallelShardingStrategy):
self.all_to_sharded_linear_in_place(layer.mlp.switch_mlp.gate_proj)
self.sharded_to_all_linear_in_place(layer.mlp.switch_mlp.down_proj)
self.all_to_sharded_linear_in_place(layer.mlp.switch_mlp.up_proj)
layer.mlp = ShardedDeepseekV3MoE(layer.mlp) # type: ignore
layer.mlp = ShardedMoE(layer.mlp) # type: ignore
layer.mlp.sharding_group = self.group
mx.eval(layer)
@@ -552,7 +557,9 @@ class DeepSeekShardingStrategy(TensorParallelShardingStrategy):
return model
class ShardedDeepseekV3MoE(CustomMlxLayer):
class ShardedMoE(CustomMlxLayer):
"""Wraps any MoE layer with distributed sum_gradients / all_sum."""
def __init__(self, layer: _LayerCallable):
super().__init__(layer)
self.sharding_group: mx.distributed.Group | None = None
@@ -623,27 +630,13 @@ class GLM4MoeLiteShardingStrategy(TensorParallelShardingStrategy):
self.all_to_sharded_linear_in_place(layer.mlp.switch_mlp.gate_proj)
self.sharded_to_all_linear_in_place(layer.mlp.switch_mlp.down_proj)
self.all_to_sharded_linear_in_place(layer.mlp.switch_mlp.up_proj)
layer.mlp = ShardedGLM4MoeLiteMoE(layer.mlp) # type: ignore
layer.mlp = ShardedMoE(layer.mlp) # type: ignore
layer.mlp.sharding_group = self.group # type: ignore
mx.eval(layer)
return model
class ShardedGLM4MoeLiteMoE(CustomMlxLayer):
def __init__(self, layer: _LayerCallable):
super().__init__(layer)
self.sharding_group: mx.distributed.Group | None = None
def __call__(self, x: mx.array) -> mx.array:
if self.sharding_group is not None:
x = sum_gradients(self.sharding_group)(x)
y = self.original_layer.__call__(x)
if self.sharding_group is not None:
y = mx.distributed.all_sum(y, group=self.sharding_group)
return y
class WrappedMiniMaxAttention(CustomMlxLayer):
def __init__(self, layer: _LayerCallable, group: mx.distributed.Group):
super().__init__(layer)
@@ -756,7 +749,7 @@ class MiniMaxShardingStrategy(TensorParallelShardingStrategy):
self.all_to_sharded_linear_in_place(
layer.block_sparse_moe.switch_mlp.up_proj
)
layer.block_sparse_moe = ShardedQwenMoE(layer.block_sparse_moe) # pyright: ignore[reportAttributeAccessIssue, reportArgumentType]
layer.block_sparse_moe = ShardedMoE(layer.block_sparse_moe) # pyright: ignore[reportAttributeAccessIssue, reportArgumentType]
layer.block_sparse_moe.sharding_group = self.group # pyright: ignore[reportAttributeAccessIssue]
mx.eval(layer)
return model
@@ -861,9 +854,7 @@ class QwenShardingStrategy(TensorParallelShardingStrategy):
# Shard the MoE. Shard in place since the MoE should be responsible
# for aggregating the results.
if isinstance(
layer.mlp, (Qwen3MoeSparseMoeBlock, MoE, Qwen3NextSparseMoeBlock)
):
if isinstance(layer.mlp, (Qwen3MoeSparseMoeBlock, Qwen3NextSparseMoeBlock)):
self.all_to_sharded_linear_in_place(layer.mlp.switch_mlp.gate_proj)
self.sharded_to_all_linear_in_place(layer.mlp.switch_mlp.down_proj)
self.all_to_sharded_linear_in_place(layer.mlp.switch_mlp.up_proj)
@@ -875,7 +866,7 @@ class QwenShardingStrategy(TensorParallelShardingStrategy):
layer.mlp.shared_expert.down_proj
)
self.all_to_sharded_linear_in_place(layer.mlp.shared_expert.up_proj)
layer.mlp = ShardedQwenMoE(layer.mlp) # pyright: ignore[reportAttributeAccessIssue, reportArgumentType]
layer.mlp = ShardedMoE(layer.mlp) # pyright: ignore[reportAttributeAccessIssue, reportArgumentType]
layer.mlp.sharding_group = self.group
# Shard the MLP
@@ -888,18 +879,50 @@ class QwenShardingStrategy(TensorParallelShardingStrategy):
return model
class ShardedQwenMoE(CustomMlxLayer):
def __init__(self, layer: _LayerCallable):
super().__init__(layer)
self.sharding_group: mx.distributed.Group | None = None
class Glm4MoeShardingStrategy(TensorParallelShardingStrategy):
def shard_model(
self,
model: nn.Module,
timeout_seconds: float,
on_timeout: TimeoutCallback | None,
) -> nn.Module:
model = cast(Glm4MoeModel, model)
for layer in model.layers:
eval_with_timeout(
layer.parameters(), timeout_seconds / len(model.layers), on_timeout
)
def __call__(self, x: mx.array) -> mx.array:
if self.sharding_group is not None:
x = sum_gradients(self.sharding_group)(x)
y = self.original_layer.__call__(x)
if self.sharding_group is not None:
y = mx.distributed.all_sum(y, group=self.sharding_group)
return y
layer.self_attn.q_proj = self.all_to_sharded_linear(layer.self_attn.q_proj)
layer.self_attn.k_proj = self.all_to_sharded_linear(layer.self_attn.k_proj)
layer.self_attn.v_proj = self.all_to_sharded_linear(layer.self_attn.v_proj)
layer.self_attn.o_proj = self.sharded_to_all_linear(layer.self_attn.o_proj)
layer.self_attn.n_heads //= self.N
layer.self_attn.n_kv_heads //= self.N
if isinstance(layer.mlp, MoE):
self.all_to_sharded_linear_in_place(layer.mlp.switch_mlp.gate_proj)
self.sharded_to_all_linear_in_place(layer.mlp.switch_mlp.down_proj)
self.all_to_sharded_linear_in_place(layer.mlp.switch_mlp.up_proj)
if getattr(layer.mlp, "shared_experts", None) is not None:
self.all_to_sharded_linear_in_place(
layer.mlp.shared_experts.gate_proj
)
self.sharded_to_all_linear_in_place(
layer.mlp.shared_experts.down_proj
)
self.all_to_sharded_linear_in_place(
layer.mlp.shared_experts.up_proj
)
layer.mlp = ShardedMoE(layer.mlp) # pyright: ignore[reportAttributeAccessIssue, reportArgumentType]
layer.mlp.sharding_group = self.group
else:
layer.mlp.gate_proj = self.all_to_sharded_linear(layer.mlp.gate_proj)
layer.mlp.down_proj = self.sharded_to_all_linear(layer.mlp.down_proj)
layer.mlp.up_proj = self.all_to_sharded_linear(layer.mlp.up_proj)
mx.eval(layer)
return model
class GptOssShardingStrategy(TensorParallelShardingStrategy):
@@ -937,21 +960,7 @@ class GptOssShardingStrategy(TensorParallelShardingStrategy):
self.sharded_to_all_linear_in_place(layer.mlp.experts.down_proj)
self.all_to_sharded_linear_in_place(layer.mlp.experts.up_proj)
layer.mlp = ShardedGptOssMoE(layer.mlp) # type: ignore
layer.mlp = ShardedMoE(layer.mlp) # type: ignore
layer.mlp.sharding_group = self.group # pyright: ignore[reportAttributeAccessIssue]
mx.eval(layer)
return model
class ShardedGptOssMoE(CustomMlxLayer):
def __init__(self, layer: nn.Module):
super().__init__(layer)
self.sharding_group: mx.distributed.Group | None = None
def __call__(self, x: mx.array) -> mx.array:
if self.sharding_group is not None:
x = sum_gradients(self.sharding_group)(x)
y = self.original_layer(x)
if self.sharding_group is not None:
y = mx.distributed.all_sum(y, group=self.sharding_group)
return y

View File

@@ -290,7 +290,6 @@ def make_kv_cache(
) -> KVCacheType:
assert hasattr(model, "layers")
# TODO: Do this for all models
if hasattr(model, "make_cache"):
logger.info("Using MLX LM's make cache")
return model.make_cache() # type: ignore

View File

@@ -298,6 +298,9 @@ def mlx_generate(
)
max_stop_len = max((len(s) for s in stop_sequences), default=0)
mx_barrier(group)
logger.info("Ready to prefill")
# Prefill cache with all tokens except the last one
prefill_tps, prefill_tokens, ssm_snapshots_list = prefill(
model,

View File

@@ -810,8 +810,9 @@ def patch_kimi_tokenizer(tokenizer: TokenizerWrapper):
# kimi has a fixed function naming scheme, with a json formatted arg
# functions.multiply:0 <|tool_call_argument_begin|> {"a": 2, "b": 3}
# Also needs to handle tools like call_0<|tool_call_argument_begin|>{"filePath": "..."}
_func_name_regex = re.compile(
r"^\s*(.+):\d+\s*<\|tool_call_argument_begin\|>", re.DOTALL
r"^\s*(.+)[:](\d+)\s*<\|tool_call_argument_begin\|>", re.DOTALL
)
_func_arg_regex = re.compile(r"<\|tool_call_argument_begin\|>\s*(.*)\s*", re.DOTALL)
@@ -835,9 +836,10 @@ def patch_kimi_tokenizer(tokenizer: TokenizerWrapper):
func_name_match = _func_name_regex.search(text)
if func_name_match is None:
raise ValueError(f"Could not parse function name from tool call: {text!r}")
func_name = func_name_match.group(1)
original_func_name = func_name_match.group(1)
tool_id = func_name_match.group(2)
# strip off the `functions.` prefix, if it exists.
func_name = func_name[func_name.find(".") + 1 :]
func_name = original_func_name[original_func_name.find(".") + 1 :]
func_args_match = _func_arg_regex.search(text)
if func_args_match is None:
@@ -846,7 +848,11 @@ def patch_kimi_tokenizer(tokenizer: TokenizerWrapper):
# the args should be valid json - no need to check against our tools to deserialize
arg_dct = _deserialize(func_args) # pyright: ignore[reportAny]
return dict(name=func_name, arguments=arg_dct) # pyright: ignore[reportAny]
return dict(
id=f"{original_func_name}:{tool_id}",
name=func_name,
arguments=arg_dct, # pyright: ignore[reportAny]
)
tokenizer._tool_call_start = tool_call_start
tokenizer._tool_call_end = tool_call_end
@@ -929,7 +935,13 @@ def _validate_single_tool(obj: dict[str, Any]) -> ToolCallItem:
and ((args := obj.get("arguments")) is not None)
and isinstance(name, str)
):
return ToolCallItem(name=name, arguments=json.dumps(args))
raw_id: object = obj.get("id")
extra = {"id": str(raw_id)} if raw_id is not None else {}
return ToolCallItem(
**extra,
name=name,
arguments=json.dumps(args),
)
else:
raise ValidationError

377
tmp/quantize_and_upload.py Executable file
View File

@@ -0,0 +1,377 @@
#!/usr/bin/env python3
"""
Download an mflux model, quantize it, and upload to HuggingFace.
Usage (run from mflux project directory):
cd /path/to/mflux
uv run python /path/to/quantize_and_upload.py --model black-forest-labs/FLUX.1-Kontext-dev
uv run python /path/to/quantize_and_upload.py --model black-forest-labs/FLUX.1-Kontext-dev --skip-base --skip-8bit
uv run python /path/to/quantize_and_upload.py --model black-forest-labs/FLUX.1-Kontext-dev --dry-run
Requires:
- Must be run from mflux project directory using `uv run`
- huggingface_hub installed (add to mflux deps or install separately)
- HuggingFace authentication: run `huggingface-cli login` or set HF_TOKEN
"""
from __future__ import annotations
import argparse
import re
import shutil
import sys
from pathlib import Path
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from mflux.models.flux.variants.txt2img.flux import Flux1
HF_ORG = "exolabs"
def get_model_class(model_name: str) -> type:
    """Resolve the mflux model class to instantiate for *model_name*.

    Matching is by case-insensitive substring, checked in a fixed order;
    FLUX.1 (``Flux1``) is the fallback when nothing else matches.
    """
    # Imports are deferred so merely importing this script does not pull
    # in the heavyweight mflux model modules.
    from mflux.models.fibo.variants.txt2img.fibo import FIBO
    from mflux.models.flux.variants.txt2img.flux import Flux1
    from mflux.models.flux2.variants.txt2img.flux2_klein import Flux2Klein
    from mflux.models.qwen.variants.txt2img.qwen_image import QwenImage
    from mflux.models.z_image.variants.turbo.z_image_turbo import ZImageTurbo

    lowered = model_name.lower()
    # Ordered dispatch table: the first entry whose substring matches wins.
    dispatch: list[tuple[tuple[str, ...], type]] = [
        (("qwen",), QwenImage),
        (("fibo",), FIBO),
        (("z-image", "zimage"), ZImageTurbo),
        (("flux2", "flux.2"), Flux2Klein),
    ]
    for needles, model_cls in dispatch:
        if any(needle in lowered for needle in needles):
            return model_cls
    return Flux1
def get_repo_name(model_name: str, bits: int | None) -> str:
    """Return the target HuggingFace repo id for a model variant.

    Args:
        model_name: Source HF path or bare name
            (e.g. "black-forest-labs/FLUX.1-Kontext-dev").
        bits: Quantization width (4 or 8), or None for the base model.

    Returns:
        ``"<HF_ORG>/<base-name>[-<bits>bit]"``.
    """
    # str.split("/")[-1] already yields the whole string when there is no
    # slash, so the previous `if "/" in model_name` branch was redundant.
    base_name = model_name.split("/")[-1]
    suffix = f"-{bits}bit" if bits else ""
    return f"{HF_ORG}/{base_name}{suffix}"
def get_local_path(output_dir: Path, model_name: str, bits: int | None) -> Path:
"""Get the local save path for a model variant."""
# Extract repo name from HF path (e.g., "black-forest-labs/FLUX.1-Kontext-dev" -> "FLUX.1-Kontext-dev")
base_name = model_name.split("/")[-1] if "/" in model_name else model_name
suffix = f"-{bits}bit" if bits else ""
return output_dir / f"{base_name}{suffix}"
def copy_source_repo(
    source_repo: str,
    local_path: Path,
    dry_run: bool = False,
) -> None:
    """Mirror *source_repo* into *local_path*, replicating the original HF
    structure, then remove redundant root-level ``*.safetensors`` files.

    Args:
        source_repo: HuggingFace repo id to download from.
        local_path: Destination directory for the snapshot.
        dry_run: If True, only print what would happen.
    """
    print(f"\n{'=' * 60}")
    print(f"Copying full repo from source: {source_repo}")
    print(f"Output path: {local_path}")
    print(f"{'=' * 60}")

    if dry_run:
        print("[DRY RUN] Would download all files from source repo")
        return

    # Deferred import keeps huggingface_hub off the path for dry runs.
    from huggingface_hub import snapshot_download

    # Download all files to our local path
    snapshot_download(
        repo_id=source_repo,
        local_dir=local_path,
    )

    # Remove root-level safetensors files (flux.1-dev.safetensors, etc.).
    # These are redundant with the component directories.
    for f in local_path.glob("*.safetensors"):
        print(f"Removing root-level safetensors: {f.name}")
        # Bug fix: the previous `if not dry_run:` guard here was dead code —
        # dry-run mode already returned above, so the unlink is unconditional.
        f.unlink()

    print(f"Source repo copied to {local_path}")
def load_and_save_quantized_model(
    model_name: str,
    bits: int,
    output_path: Path,
    dry_run: bool = False,
) -> None:
    """Load *model_name* with *bits*-bit quantization and save it in mflux
    format under *output_path*.

    In dry-run mode only the plan is printed; nothing is loaded or written.
    """
    banner = "=" * 60
    print(f"\n{banner}")
    print(f"Loading {model_name} with {bits}-bit quantization...")
    print(f"Output path: {output_path}")
    print(banner)

    if dry_run:
        print("[DRY RUN] Would load and save quantized model")
        return

    # Deferred import keeps mflux off the import path for dry runs.
    from mflux.models.common.config.model_config import ModelConfig

    model_cls = get_model_class(model_name)
    config = ModelConfig.from_name(model_name=model_name, base_model=None)
    model: Flux1 = model_cls(
        quantize=bits,
        model_config=config,
    )

    print(f"Saving model to {output_path}...")
    model.save_model(str(output_path))
    print(f"Model saved successfully to {output_path}")
def copy_source_metadata(
    source_repo: str,
    local_path: Path,
    dry_run: bool = False,
) -> None:
    """Fetch non-weight files (LICENSE, README, configs, ...) from
    *source_repo* into *local_path*, excluding ``*.safetensors``.
    """
    banner = "=" * 60
    print(f"\n{banner}")
    print(f"Copying metadata from source repo: {source_repo}")
    print(banner)

    if dry_run:
        print("[DRY RUN] Would download metadata files (excluding *.safetensors)")
        return

    # Deferred import keeps huggingface_hub off the path for dry runs.
    from huggingface_hub import snapshot_download

    # Weight shards are skipped; only metadata lands in local_path.
    snapshot_download(
        repo_id=source_repo,
        local_dir=local_path,
        ignore_patterns=["*.safetensors"],
    )
    print(f"Metadata files copied to {local_path}")
def upload_to_huggingface(
    local_path: Path,
    repo_id: str,
    dry_run: bool = False,
    clean_remote: bool = False,
) -> None:
    """Push the contents of *local_path* to the HF model repo *repo_id*.

    When *clean_remote* is set, previously-uploaded mflux-format artifacts
    (numbered shards and index files) are deleted from the remote first.
    Cleaning is best-effort: failures print a warning instead of raising.
    """
    banner = "=" * 60
    print(f"\n{banner}")
    print(f"Uploading to HuggingFace: {repo_id}")
    print(f"Local path: {local_path}")
    print(f"Clean remote first: {clean_remote}")
    print(banner)

    if dry_run:
        print("[DRY RUN] Would upload to HuggingFace")
        return

    from huggingface_hub import HfApi

    api = HfApi()

    # Make sure the destination repo exists before touching it.
    print(f"Creating/verifying repo: {repo_id}")
    api.create_repo(repo_id=repo_id, repo_type="model", exist_ok=True)

    if clean_remote:
        print("Cleaning old mflux-format files from remote...")
        # Pattern for mflux numbered shards: <dir>/<number>.safetensors
        shard_pattern = re.compile(r".*/\d+\.safetensors$")
        try:
            for file_path in api.list_repo_files(repo_id=repo_id, repo_type="model"):
                # Delete numbered safetensors (mflux format) and mflux index files
                stale = bool(shard_pattern.match(file_path)) or file_path.endswith(
                    "/model.safetensors.index.json"
                )
                if stale:
                    print(f"  Deleting: {file_path}")
                    api.delete_file(
                        path_in_repo=file_path, repo_id=repo_id, repo_type="model"
                    )
        except Exception as e:
            # Best-effort cleanup: a failure here must not block the upload.
            print(f"Warning: Could not clean remote files: {e}")

    # Upload the folder
    print("Uploading folder contents...")
    api.upload_folder(
        folder_path=str(local_path),
        repo_id=repo_id,
        repo_type="model",
    )
    print(f"Upload complete: https://huggingface.co/{repo_id}")
def clean_local_files(local_path: Path, dry_run: bool = False) -> None:
    """Delete the local copy of a model tree after upload.

    Missing paths are silently ignored; dry-run mode only reports intent.
    """
    print(f"\nCleaning up: {local_path}")
    if dry_run:
        print("[DRY RUN] Would remove local files")
        return
    if not local_path.exists():
        return
    shutil.rmtree(local_path)
    print(f"Removed {local_path}")
def main() -> int:
    """Entry point: process each requested variant (base / 4-bit / 8-bit).

    For every variant the pipeline is: fetch (base) or quantize (4/8-bit)
    the model into a local directory, copy license/readme metadata from the
    source repo, optionally upload to HuggingFace, and optionally delete the
    local copy afterwards.

    Returns:
        Process exit code: 0 on success, 1 if every variant was skipped.
    """
    parser = argparse.ArgumentParser(
        description="Download an mflux model, quantize it, and upload to HuggingFace.",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  # Process all variants (base, 4-bit, 8-bit) for FLUX.1-Kontext-dev
  python tmp/quantize_and_upload.py --model black-forest-labs/FLUX.1-Kontext-dev

  # Only process 4-bit variant
  python tmp/quantize_and_upload.py --model black-forest-labs/FLUX.1-Kontext-dev --skip-base --skip-8bit

  # Save locally without uploading
  python tmp/quantize_and_upload.py --model black-forest-labs/FLUX.1-Kontext-dev --skip-upload

  # Preview what would happen
  python tmp/quantize_and_upload.py --model black-forest-labs/FLUX.1-Kontext-dev --dry-run
""",
    )
    parser.add_argument(
        "--model",
        "-m",
        required=True,
        help="HuggingFace model path (e.g., black-forest-labs/FLUX.1-Kontext-dev)",
    )
    parser.add_argument(
        "--output-dir",
        type=Path,
        default=Path("./tmp/models"),
        help="Local directory to save models (default: ./tmp/models)",
    )
    parser.add_argument(
        "--skip-base",
        action="store_true",
        help="Skip base model (no quantization)",
    )
    parser.add_argument(
        "--skip-4bit",
        action="store_true",
        help="Skip 4-bit quantized model",
    )
    parser.add_argument(
        "--skip-8bit",
        action="store_true",
        help="Skip 8-bit quantized model",
    )
    parser.add_argument(
        "--skip-download",
        action="store_true",
        help="Skip downloading/processing, only do upload/clean operations",
    )
    parser.add_argument(
        "--skip-upload",
        action="store_true",
        help="Only save locally, don't upload to HuggingFace",
    )
    parser.add_argument(
        "--clean",
        action="store_true",
        help="Remove local files after upload",
    )
    parser.add_argument(
        "--clean-remote",
        action="store_true",
        help="Delete old mflux-format files from remote repo before uploading",
    )
    parser.add_argument(
        "--dry-run",
        action="store_true",
        help="Print actions without executing",
    )
    args = parser.parse_args()

    # Determine which variants to process (None means the unquantized base)
    variants: list[int | None] = []
    if not args.skip_base:
        variants.append(None)  # Base model (no quantization)
    if not args.skip_4bit:
        variants.append(4)
    if not args.skip_8bit:
        variants.append(8)

    if not variants:
        print("Error: All variants skipped. Nothing to do.")
        return 1

    # Create output directory.
    # Bug fix: this previously ran even under --dry-run, creating a
    # directory despite the "print actions without executing" promise.
    if not args.dry_run:
        args.output_dir.mkdir(parents=True, exist_ok=True)

    print(f"Model: {args.model}")
    print(f"Output directory: {args.output_dir}")
    print(
        f"Variants to process: {['base' if v is None else f'{v}-bit' for v in variants]}"
    )
    print(f"Upload to HuggingFace: {not args.skip_upload}")
    print(f"Clean after upload: {args.clean}")
    if args.dry_run:
        print("\n*** DRY RUN MODE - No actual changes will be made ***")

    # Process each variant
    for bits in variants:
        local_path = get_local_path(args.output_dir, args.model, bits)
        repo_id = get_repo_name(args.model, bits)

        if not args.skip_download:
            if bits is None:
                # Base model: copy original HF repo structure (no mflux
                # conversion). This already includes LICENSE/README etc.
                copy_source_repo(
                    source_repo=args.model,
                    local_path=local_path,
                    dry_run=args.dry_run,
                )
            else:
                # Quantized model: load, quantize, and save with mflux
                load_and_save_quantized_model(
                    model_name=args.model,
                    bits=bits,
                    output_path=local_path,
                    dry_run=args.dry_run,
                )
                # Copy metadata from source repo (LICENSE, README, etc.).
                # NOTE(review): only the quantized branch needs this —
                # copy_source_repo above already mirrors metadata for the
                # base variant. Confirm this matches the intended layout.
                copy_source_metadata(
                    source_repo=args.model,
                    local_path=local_path,
                    dry_run=args.dry_run,
                )

        # Upload
        if not args.skip_upload:
            upload_to_huggingface(
                local_path=local_path,
                repo_id=repo_id,
                dry_run=args.dry_run,
                clean_remote=args.clean_remote,
            )

        # Clean up if requested
        if args.clean:
            clean_local_files(local_path, dry_run=args.dry_run)

    print("\n" + "=" * 60)
    print("All done!")
    print("=" * 60)
    return 0


if __name__ == "__main__":
    sys.exit(main())

20
uv.lock generated
View File

@@ -192,20 +192,14 @@ sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8
wheels = [
{ url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" },
{ url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" },
{ url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" },
{ url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" },
{ url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" },
{ url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" },
{ url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" },
{ url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" },
{ url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" },
{ url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" },
{ url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" },
{ url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" },
{ url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" },
{ url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" },
{ url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" },
{ url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" },
{ url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" },
{ url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" },
{ url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" },
@@ -311,10 +305,8 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" },
{ url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" },
{ url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" },
{ url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" },
{ url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" },
{ url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" },
{ url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" },
{ url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" },
{ url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" },
{ url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" },
@@ -322,10 +314,8 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078, upload-time = "2025-10-15T23:17:23.042Z" },
{ url = "https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460, upload-time = "2025-10-15T23:17:24.885Z" },
{ url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237, upload-time = "2025-10-15T23:17:26.449Z" },
{ url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344, upload-time = "2025-10-15T23:17:28.06Z" },
{ url = "https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564, upload-time = "2025-10-15T23:17:29.665Z" },
{ url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415, upload-time = "2025-10-15T23:17:31.686Z" },
{ url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457, upload-time = "2025-10-15T23:17:33.478Z" },
{ url = "https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074, upload-time = "2025-10-15T23:17:35.158Z" },
{ url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569, upload-time = "2025-10-15T23:17:37.188Z" },
{ url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941, upload-time = "2025-10-15T23:17:39.236Z" },
@@ -333,10 +323,8 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" },
{ url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" },
{ url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" },
{ url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" },
{ url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" },
{ url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" },
{ url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" },
{ url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" },
{ url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" },
{ url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" },
@@ -412,7 +400,7 @@ requires-dist = [
{ name = "huggingface-hub", specifier = ">=0.33.4" },
{ name = "hypercorn", specifier = ">=0.18.0" },
{ name = "loguru", specifier = ">=0.7.3" },
{ name = "mflux", specifier = "==0.15.4" },
{ name = "mflux", specifier = "==0.15.5" },
{ name = "mlx", marker = "sys_platform == 'darwin'", specifier = "==0.30.5" },
{ name = "mlx", extras = ["cpu"], marker = "sys_platform == 'linux'", specifier = "==0.30.5" },
{ name = "mlx-lm", specifier = "==0.30.6" },
@@ -987,7 +975,7 @@ wheels = [
[[package]]
name = "mflux"
version = "0.15.4"
version = "0.15.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "filelock", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
@@ -1013,9 +1001,9 @@ dependencies = [
{ name = "twine", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
{ name = "urllib3", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a6/f8/95322db7a865e4df6bad108b1c99aa7fbe211aac3f298f3ad696c2744a39/mflux-0.15.4.tar.gz", hash = "sha256:138e1aedae86e13eafeb8faec017945fcdcca42c3234daabcd81a83c9a202ace", size = 741228, upload-time = "2026-01-20T15:39:26.807Z" }
sdist = { url = "https://files.pythonhosted.org/packages/35/8e/f20de51bf9dc0a986535d9a825db4ae314163421b3d3ddaa90a2b959b9fd/mflux-0.15.5.tar.gz", hash = "sha256:9a3372bd64d51c4caff4ff9e7d7d698bea5833242fd849c59cbb0c92f7d7aa3b", size = 743700, upload-time = "2026-01-26T12:41:45.272Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/8e/be/81cf4ce2d1933b9b210c028a05ac95e958008c0d43e377a5f2757b7f2d4d/mflux-0.15.4-py3-none-any.whl", hash = "sha256:f04d9b1d7c5cd67880f483ab29fb2097648a25459eef9c5ee6480fad46de5e82", size = 987644, upload-time = "2026-01-20T15:39:24.817Z" },
{ url = "https://files.pythonhosted.org/packages/ac/bb/ef936eae2ae78a47cd92ddffc18fc06ad3fd5f438a0915fb62d8bb9508ec/mflux-0.15.5-py3-none-any.whl", hash = "sha256:c94891d4a518047a818863bb099c755e93af90c524ced358baf5b31502c09e82", size = 990939, upload-time = "2026-01-26T12:41:42.898Z" },
]
[[package]]