Swift Examples - Fal

Service setup

Create a Fal service in the AIProxy dashboard

Follow the integration guide, selecting the Fal icon on the 'Create a New Service' form.

How to generate a FastSDXL image using Fal

import AIProxy

/* Uncomment for BYOK use cases */
// let falService = AIProxy.falDirectService(
//     unprotectedAPIKey: "your-fal-key"
// )

/* Uncomment for all other production use cases */
// let falService = AIProxy.falService(
//     partialKey: "partial-key-from-your-developer-dashboard",
//     serviceURL: "service-url-from-your-developer-dashboard"
// )

let input = FalFastSDXLInputSchema(
    prompt: "Yosemite Valley",
    enableSafetyChecker: false
)
do {
    let output = try await falService.createFastSDXLImage(input: input)
    print("""
            The first output image is at \(output.images?.first?.url?.absoluteString ?? "")
            It took \(output.timings?.inference ?? Double.nan) seconds to generate.
            """)
} catch AIProxyError.unsuccessfulRequest(let statusCode, let responseBody) {
    print("Received non-200 status code: \(statusCode) with response body: \(responseBody)")
} catch {
    print("Could not create Fal SDXL image: \(error.localizedDescription)")
}
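
Fal returns hosted URLs rather than raw image bytes. To show the result in your app, you might fetch the bytes yourself. Here is a minimal sketch, assuming `imageURL` is the first image URL unwrapped from the output above:

import AppKit

// Download the generated image from the URL that Fal returned.
// `imageURL` is assumed to be output.images?.first?.url from the example above.
func fetchGeneratedImage(at imageURL: URL) async throws -> NSImage? {
    let (data, _) = try await URLSession.shared.data(from: imageURL)
    return NSImage(data: data)
}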

See FalFastSDXLInputSchema.swift for the full range of image generation controls.

How to use the fashn/tryon model on Fal

The tryon model takes a garment image and a model image. Each image can be supplied in one of two forms:

  1. A remote URL to the image hosted on a public site
  2. A local data URL that you construct using `AIProxy.encodeImageAsURL`

The example below uses the second form; a remote-URL variant is sketched after it.

import AIProxy
import AppKit

/* Uncomment for BYOK use cases */
// let falService = AIProxy.falDirectService(
//     unprotectedAPIKey: "your-fal-key"
// )

/* Uncomment for all other production use cases */
// let falService = AIProxy.falService(
//     partialKey: "partial-key-from-your-developer-dashboard",
//     serviceURL: "service-url-from-your-developer-dashboard"
// )

guard let garmentImage = NSImage(named: "garment-image"),
      let garmentImageURL = AIProxy.encodeImageAsURL(image: garmentImage,
                                                     compressionQuality: 0.6) else {
    print("Could not find or encode an image named 'garment-image' in your app assets")
    return
}

guard let modelImage = NSImage(named: "model-image"),
      let modelImageURL = AIProxy.encodeImageAsURL(image: modelImage,
                                                   compressionQuality: 0.6) else {
    print("Could not find or encode an image named 'model-image' in your app assets")
    return
}

let input = FalTryonInputSchema(
    category: .tops,
    garmentImage: garmentImageURL,
    modelImage: modelImageURL
)
do {
    let output = try await falService.createTryonImage(input: input)
    print("Tryon image is available at: \(output.images.first?.url.absoluteString ?? "No URL")")
} catch AIProxyError.unsuccessfulRequest(let statusCode, let responseBody) {
    print("Received non-200 status code: \(statusCode) with response body: \(responseBody)")
} catch {
    print("Could not create fashn/tryon image on Fal: \(error.localizedDescription)")
}
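
If your garment and model images are already hosted publicly (the first form listed above), you can skip the encoding step and pass remote URLs directly. A minimal sketch; the URLs here are placeholders for your own hosted images:

// Substitute URLs to your own publicly hosted images.
guard let garmentImageURL = URL(string: "https://example.com/garment.png"),
      let modelImageURL = URL(string: "https://example.com/model.png") else {
    return
}

let input = FalTryonInputSchema(
    category: .tops,
    garmentImage: garmentImageURL,
    modelImage: modelImageURL
)

From there, the createTryonImage call is the same as in the example above.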

How to train Flux on your own images using Fal

Upload training data to Fal

Your training data must be a zip file of images. You can either pull a prebuilt zip from your app's assets (as I do here), or build the zip at runtime (a sketch of that follows the upload example below):

import AIProxy

/* Uncomment for BYOK use cases */
// let falService = AIProxy.falDirectService(
//     unprotectedAPIKey: "your-fal-key"
// )

/* Uncomment for all other production use cases */
// let falService = AIProxy.falService(
//     partialKey: "partial-key-from-your-developer-dashboard",
//     serviceURL: "service-url-from-your-developer-dashboard"
// )

// Get the images to train with:
guard let trainingData = NSDataAsset(name: "training") else {
    print("Drop training.zip file into Assets first")
    return
}

do {
    let url = try await falService.uploadTrainingZipFile(
        zipData: trainingData.data,
        name: "training.zip"
    )
    print("Training file uploaded. Find it at \(url.absoluteString)")
} catch AIProxyError.unsuccessfulRequest(let statusCode, let responseBody) {
    print("Received non-200 status code: \(statusCode) with response body: \(responseBody)")
} catch {
    print("Could not upload file to Fal: \(error.localizedDescription)")
}
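
If you would rather build the zip at runtime than bundle one in your assets, Foundation's file coordinator can produce a zipped copy of a directory when you read it with the .forUploading option. A minimal sketch, assuming your training images have already been written to a folder at `imagesDirectoryURL`:

import Foundation

// Zip a directory of training images at runtime.
// `imagesDirectoryURL` is assumed to point at a folder of images you created earlier.
func zipTrainingImages(at imagesDirectoryURL: URL) -> Data? {
    var zipData: Data?
    var coordinatorError: NSError?
    // Reading with .forUploading asks the coordinator to hand back a zipped copy of the directory.
    NSFileCoordinator().coordinate(readingItemAt: imagesDirectoryURL,
                                   options: .forUploading,
                                   error: &coordinatorError) { zipURL in
        zipData = try? Data(contentsOf: zipURL)
    }
    return zipData
}

You can then pass the returned data to falService.uploadTrainingZipFile in place of trainingData.data above.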

Train fal-ai/flux-lora-fast-training using your uploaded data

Using the URL returned in the step above:

let input = FalFluxLoRAFastTrainingInputSchema(
    imagesDataURL: <url-from-step-above>,
    triggerWord: "face"
)
do {
    let output = try await falService.createFluxLoRAFastTraining(input: input)
    print("""
            Fal's Flux LoRA fast trainer is complete.
            Your weights are at: \(output.diffusersLoraFile?.url?.absoluteString ?? "")
            """)
} catch AIProxyError.unsuccessfulRequest(let statusCode, let responseBody) {
    print("Received non-200 status code: \(statusCode) with response body: \(responseBody)")
} catch {
    print("Could not create Fal Flux training: \(error.localizedDescription)")
}

See FalFluxLoRAFastTrainingInputSchema.swift for the full range of training controls.
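
The weights URL is the only artifact from training that inference needs, so you may want to persist it instead of retraining on every launch. A minimal sketch to drop inside the do block above; the UserDefaults key name is my own choice:

// Persist the trained LoRA weights URL so the inference step can reuse it later.
// The "falFluxLoRAWeightsURL" key is arbitrary; pick whatever fits your app.
if let weightsURL = output.diffusersLoraFile?.url {
    UserDefaults.standard.set(weightsURL.absoluteString, forKey: "falFluxLoRAWeightsURL")
}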


Run inference on your trained model

Using the LoRA URL returned in the step above:

let inputSchema = FalFluxLoRAInputSchema(
    prompt: "face on a blimp over Monument Valley, Utah",
    loras: [
        .init(
            path: <lora-url-from-step-above>,
            scale: 0.9
        )
    ],
    numImages: 2,
    outputFormat: .jpeg
)
do {
    let output = try await falService.createFluxLoRAImage(input: inputSchema)
    print("""
            Fal's Flux LoRA inference is complete.
            Your images are at: \(output.images?.compactMap {$0.url?.absoluteString} ?? [])
            """)
} catch AIProxyError.unsuccessfulRequest(let statusCode, let responseBody) {
    print("Received non-200 status code: \(statusCode) with response body: \(responseBody)")
} catch {
    print("Could not create Fal LoRA image: \(error.localizedDescription)")
}

See FalFluxLoRAInputSchema.swift for the full range of inference controls.