FileCatalyst | Workload Automation


A Python wrapper script can chain pre- and post-processing around each transfer, logging results and backing off on failure:

```python
import hashlib
import logging
import subprocess
import time

def main():
    files_to_send = ["/data/file1.bin", "/data/file2.bin"]
    for f in files_to_send:
        # Pre-processing: compute hash
        with open(f, "rb") as fp:
            original_hash = hashlib.sha256(fp.read()).hexdigest()

        # run_fta() wraps the fta-cli client; see the sketch after the summary table
        success = run_fta(f, "/incoming/", "fc-server.company.com", "auto", "secret")
        if success:
            logging.info(f"Success: {f}")
            # Post-processing: log to database
            subprocess.run(["psql", "-c",
                            f"INSERT INTO transfers VALUES('{f}', '{original_hash}')"])
        else:
            logging.error(f"Failed: {f}")
            time.sleep(30)  # Backoff before retry

if __name__ == "__main__":
    main()
```

Summary Table: Choosing an Automation Method

| Requirement | Recommended Method |
|-------------|--------------------|
| Simple directory watching | Hotfolder |
| Scripted, scheduled transfers | CLI + cron/systemd timer |
| Complex workflow with multiple steps | CLI + Bash/Python logic |
| Integration with Airflow/Jenkins | REST API or BashOperator |
| Central management of many transfers | REST API + custom dashboard |
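The wrapper script above assumes a `run_fta()` helper that is not shown. A minimal sketch, assuming it shells out to the fta-cli client with the flags used elsewhere in this article (the `--password` flag is an assumption not seen in the other examples):

```python
import subprocess

def run_fta(local_path, target_dir, server, username, password):
    # Sketch only: invoke fta-cli as in the shell example and report
    # success via its exit code. Flag names mirror the other examples
    # in this article; adjust for your actual client.
    result = subprocess.run([
        "fta-cli",
        "--server", server,
        "--username", username,
        "--password", password,  # assumption: a --password flag exists
        "--put", local_path,
        "--target", target_dir,
    ])
    return result.returncode == 0
```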

For pipelines already orchestrated in Airflow, wrap the CLI call in a BashOperator:

```python
from airflow import DAG
from airflow.operators.bash import BashOperator
from datetime import datetime

default_args = {'retries': 3}

with DAG('fc_transfer_dag',
         start_date=datetime(2024, 1, 1),
         schedule='0 2 * * *',
         default_args=default_args) as dag:
    transfer = BashOperator(
        task_id='send_to_fc',
        bash_command='fta-cli --server fc.prod.com --put /daily/report.csv --target /archive/'
    )
```

When a transfer fails, enable detailed logs to troubleshoot:

```bash
fta-cli --log-level DEBUG --log-file /var/log/fc_workload.log --put file.dat
```

To watch for backlog, use a script that scrapes the REST API and exposes metrics:

```python
import requests

def get_queue_depth():
    resp = requests.get("http://fc-server:8080/api/transfers?status=PENDING")
    return len(resp.json())

if get_queue_depth() > 50:
    alert("FileCatalyst backlog critical")  # alert() stands in for your notification hook
```
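To expose that queue depth continuously rather than checking it once, one option is a small Prometheus exporter. A sketch assuming the `prometheus_client` package; the metric name and port are illustrative, and the endpoint is the same one queried above:

```python
import time

import requests
from prometheus_client import Gauge, start_http_server

PENDING = Gauge('fc_pending_transfers',
                'Transfers waiting in the FileCatalyst queue')

def scrape_loop(interval=60):
    # Serve metrics on :9100/metrics and refresh the gauge each interval
    start_http_server(9100)
    while True:
        resp = requests.get("http://fc-server:8080/api/transfers?status=PENDING")
        PENDING.set(len(resp.json()))
        time.sleep(interval)

if __name__ == "__main__":
    scrape_loop()
```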

For programmatic control, submit jobs through the REST API. The original snippet ends at the payload definition; the POST and polling below are a hedged completion, and the endpoint shape and header name are assumptions:

```python
import requests
import time

API_BASE = "http://fc-server:8080/api"
API_KEY = "your-api-key"

def run_transfer(local_path, remote_path):
    payload = {
        "source": local_path,
        "destination": remote_path,
        "server": "destination-host",
        "username": "transfer_user",
        "password": "secret",
    }
    # Submit the job (X-API-Key header name is an assumption)
    resp = requests.post(f"{API_BASE}/transfers", json=payload,
                         headers={"X-API-Key": API_KEY})
    resp.raise_for_status()
    job_id = resp.json()["id"]
    # Poll until the job leaves the queue (per-job endpoint is an assumption)
    while True:
        job = requests.get(f"{API_BASE}/transfers/{job_id}",
                           headers={"X-API-Key": API_KEY}).json()
        if job["status"] not in ("PENDING", "RUNNING"):
            return job["status"] == "COMPLETE"
        time.sleep(5)
```
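Hypothetical usage, given the completion above:

```python
if run_transfer("/data/report.csv", "/archive/report.csv"):
    print("Transfer complete")
```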

Method A: CLI Scripting – Best for Complex, Multi-Step Workloads

```bash
#!/bin/bash
# workload_processor.sh

# Step 1: Compress files
tar -czf /data/prepared/batch1.tar.gz /data/raw/*.csv

# Step 2: Transfer the archive
fta-cli --server fc.example.com --port 11001 --username auto_user \
  --put /data/prepared/batch1.tar.gz --target /incoming/

# Step 3: Verify success (check exit code)
if [ $? -eq 0 ]; then
    echo "Transfer successful, triggering downstream API"
    curl -X POST https://processing.api/start -d '{"file":"batch1.tar.gz"}'
else
    echo "Transfer failed" >> /var/log/fc_errors.log
fi
```

Method B: Hotfolders – Best for Simple, Event-Driven Workloads

Configure hotfolder.properties to watch a directory. Any file dropped into it is automatically transferred.
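If you want hotfolder-style behavior in your own tooling, a minimal polling sketch is shown below. This is purely illustrative and does not reproduce the real hotfolder, which is configured through hotfolder.properties (its property names are product-specific and not shown here); the watch directory and CLI flags are assumptions carried over from the earlier examples:

```python
import subprocess
import time
from pathlib import Path

WATCH_DIR = Path("/data/hotfolder")  # hypothetical watch directory

def watch(interval=10):
    seen = set()
    while True:
        for path in WATCH_DIR.iterdir():
            if path.is_file() and path not in seen:
                # New file appeared: hand it to the CLI client
                subprocess.run(["fta-cli", "--server", "fc.example.com",
                                "--put", str(path), "--target", "/incoming/"])
                seen.add(path)
        time.sleep(interval)

if __name__ == "__main__":
    watch()
```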