diff --git a/README.md b/README.md
index 415b2f26ec7df51c54d51499f5eaa8b2ec124fb8..e4253f983feba173c5237bd0e863c266bd5a7dda 100644
--- a/README.md
+++ b/README.md
@@ -67,7 +67,7 @@ docker run -it \
     --volume <DATABASES_DIR>:/root/public_databases \
     --gpus all \
     alphafold3 \
-python run_alphafold.py \
+    python run_alphafold.py \
     --json_path=/root/af_input/fold_input.json \
     --model_dir=/root/models \
     --output_dir=/root/af_output
diff --git a/docs/installation.md b/docs/installation.md
index 70eff712655c3325fe9a7e4dcbbe551e6437b078..de855ed2fbcd988fabe83d4a50a8304630e4c344 100644
--- a/docs/installation.md
+++ b/docs/installation.md
@@ -270,7 +270,7 @@ docker run -it \
     --volume <DATABASES_DIR>:/root/public_databases \
     --gpus all \
     alphafold3 \
-python run_alphafold.py \
+    python run_alphafold.py \
     --json_path=/root/af_input/fold_input.json \
     --model_dir=/root/models \
     --output_dir=/root/af_output
@@ -348,7 +348,7 @@ singularity exec \
      --bind <MODEL_PARAMETERS_DIR>:/root/models \
      --bind <DATABASES_DIR>:/root/public_databases \
      alphafold3.sif \
-python alphafold3/run_alphafold.py \
+     python alphafold3/run_alphafold.py \
      --json_path=/root/af_input/fold_input.json \
      --model_dir=/root/models \
      --db_dir=/root/public_databases \
diff --git a/fetch_databases.py b/fetch_databases.py
index 28726ead07e93c34c1f7fbfa0efe406e942d67b6..1f4c08e9a499cd83f7d79133d2ff1bc2f4ad37dc 100644
--- a/fetch_databases.py
+++ b/fetch_databases.py
@@ -18,6 +18,7 @@ import argparse
 import concurrent.futures
 import functools
 import os
+import pathlib
 import subprocess
 import sys
 
@@ -34,15 +35,15 @@ DATABASE_FILES = (
     'uniref90_2022_05.fa.zst',
 )
 
-BUCKET_PATH = 'https://storage.googleapis.com/alphafold-databases/v3.0'
+BUCKET_URL = 'https://storage.googleapis.com/alphafold-databases/v3.0'
 
 
 def download_and_decompress(
-    filename: str, *, bucket_path: str, download_destination: str
+    filename: str, *, bucket_url: str, download_destination: pathlib.Path
 ) -> None:
   """Downloads and decompresses a ztsd-compressed file."""
   print(
-      f'STARTING download {filename} from {bucket_path} to'
+      f'STARTING download {filename} from {bucket_url} to'
       f' {download_destination}'
   )
   # Continue (`continue-at -`) for resumability of a partially downloaded file.
@@ -55,8 +56,9 @@ def download_and_decompress(
           'curl',
           '--progress-bar',
           *('--continue-at', '-'),
-          *('--output', f'{download_destination}/{filename}'),
-          f'{bucket_path}/{filename}',
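+          # pathlib's '/' operator builds the output path; curl needs a str.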
+          *('--output', str(download_destination / filename)),
+          f'{bucket_url}/{filename}',
           *('--stderr', '/dev/stdout'),
       ),
       check=True,
@@ -66,7 +67,7 @@ def download_and_decompress(
       universal_newlines=True,
   )
   print(
-      f'FINISHED downloading {filename} from {bucket_path} to'
+      f'FINISHED downloading {filename} from {bucket_url} to'
       f' {download_destination}.'
   )
 
@@ -91,15 +92,21 @@ def main(argv=('',)) -> None:
   )
   args = parser.parse_args(argv)
 
-  if os.geteuid() != 0 and args.download_destination.startswith('/srv'):
-    raise ValueError(
-        'You must run this script as root to be able to write to /srv.'
-    )
-
-  destination = os.path.expanduser(args.download_destination)
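+  # Expand '~' first: pathlib.Path does not expand it on construction.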
+  destination = pathlib.Path(os.path.expanduser(args.download_destination))
 
-  print(f'Downloading all data to: {destination}')
-  os.makedirs(destination, exist_ok=True)
+  print(f'Downloading all data to: {destination}')
+  try:
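+    # Create the destination (and any missing parents), then verify it is
+    # writable up front, replacing the old root-only check for /srv.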
+    destination.mkdir(parents=True, exist_ok=True)
+    if not os.access(destination, os.W_OK):
+      raise PermissionError()
+  except PermissionError as e:
+    raise PermissionError(
+        'You do not have write permissions to the destination directory'
+        f' {destination}.'
+    ) from e
 
   # Download each of the files and decompress them in parallel.
   with concurrent.futures.ThreadPoolExecutor(
@@ -109,7 +113,7 @@ def main(argv=('',)) -> None:
         pool.map(
             functools.partial(
                 download_and_decompress,
-                bucket_path=BUCKET_PATH,
+                bucket_url=BUCKET_URL,
                 download_destination=destination,
             ),
             DATABASE_FILES,
@@ -118,7 +122,7 @@ def main(argv=('',)) -> None:
 
   # Delete all zstd files at the end (after successfully decompressing them).
   for filename in DATABASE_FILES:
-    os.remove(f'{args.download_destination}/{filename}')
+    os.remove(destination / filename)
 
   print('All databases have been downloaded and decompressed.')
 
diff --git a/src/alphafold3/test_data/alphafold_run_outputs/run_alphafold_test_output_bucket_1024.pkl b/src/alphafold3/test_data/alphafold_run_outputs/run_alphafold_test_output_bucket_1024.pkl
index 36eac1270ab5f4743659da688f86cf47f068ca3a..1a39958c9a8cb00efb4daa10c39a76339a516626 100644
Binary files a/src/alphafold3/test_data/alphafold_run_outputs/run_alphafold_test_output_bucket_1024.pkl and b/src/alphafold3/test_data/alphafold_run_outputs/run_alphafold_test_output_bucket_1024.pkl differ
diff --git a/src/alphafold3/test_data/alphafold_run_outputs/run_alphafold_test_output_bucket_default.pkl b/src/alphafold3/test_data/alphafold_run_outputs/run_alphafold_test_output_bucket_default.pkl
index c96d815c6812a3e755f63cd101596e2fb9fb57e5..34d2df059110dd526dceffce97da52445e12bea1 100644
Binary files a/src/alphafold3/test_data/alphafold_run_outputs/run_alphafold_test_output_bucket_default.pkl and b/src/alphafold3/test_data/alphafold_run_outputs/run_alphafold_test_output_bucket_default.pkl differ
diff --git a/src/alphafold3/test_data/featurised_example.pkl b/src/alphafold3/test_data/featurised_example.pkl
index a1030d8fa2656ab37ea1aa5ed7706df8f6983775..dd08bf5e428e9a156a2f03e3afb1783b9acc8a65 100644
Binary files a/src/alphafold3/test_data/featurised_example.pkl and b/src/alphafold3/test_data/featurised_example.pkl differ