
Commit

removed local paths
FabioLddo committed Apr 9, 2024
1 parent 3572c4d commit 49d10ad
Showing 3 changed files with 6 additions and 21 deletions.
src/create_download_biome8.py: 18 changes (2 additions, 16 deletions)
@@ -33,22 +33,8 @@ def main():
if href and href.endswith(".tar.gz"): # Modify the condition based on your specific requirements
download_links.append(href)

-# # Generate the .sh file with the wget and tar commands
-# with open("cloud_coverage_TOELT_SUPSI/src/download_landsat8_biome.sh", "w") as file:
-# file.write("#!/bin/bash\n")
-# file.write("# Auto-generated script to download and extract files\n")
-# file.write("\n")
-# # file.write("mkdir -p /path/to/extracted/files\n") # Replace '/path/to/extracted/files' with the desired extraction folder
-# file.write("\n")
-# for link in download_links:
-# file.write(f"wget {link}\n")
-# filename = link.split("/")[-1]
-# file.write(f"tar zxvf {filename} -C /home/floddo/cloud_coverage_TOELT_SUPSI/Data/L8_Biome\n")
-# file.write(f"rm {filename}\n") # Optional: Remove the downloaded .tar.gz file after extraction
-
-
# Generate the .sh file with the wget and tar commands
with open("cloud_coverage_TOELT_SUPSI/src/download_landsat8_biome.sh", "w") as file:
with open("src/download_landsat8_biome.sh", "w") as file:
file.write("#!/bin/bash\n")
file.write("# Auto-generated script to download and extract files\n")
file.write("\n")
@@ -68,7 +54,7 @@ def main():

file.write(f"wget {link}\n")
filename = link.split("/")[-1]
file.write(f"tar zxvf {filename} -C {biome_raw_dir}/{current_biome}\n")
file.write(f"tar zxvf {filename} -C {biome_raw_dir.as_posix()}/{current_biome}\n")
file.write(f"rm {filename}\n") # Optional: Remove the downloaded .tar.gz file after extraction


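For context, the loop above writes one wget/tar/rm triple per archive link into download_landsat8_biome.sh. A minimal sketch of what the generated script might contain, assuming an illustrative archive URL and a hypothetical Data/L8_Biome/Barren target directory (the real values come from the scraped download_links, biome_raw_dir, and current_biome):

#!/bin/bash
# Auto-generated script to download and extract files

# Illustrative entry only; the actual URL and extraction folder depend on the scraped links and the current biome.
wget https://example.com/scene_001.tar.gz
tar zxvf scene_001.tar.gz -C Data/L8_Biome/Barren
rm scene_001.tar.gz  # optional cleanup after extraction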
src/download_S2_mlhub.sh: 4 changes (2 additions, 2 deletions)
@@ -2,8 +2,8 @@
# Auto-generated script to download and extract files

wget https://mlhub.earth/archive/ref_cloud_cover_detection_challenge_v1_test_source
-# tar zxvf ref_cloud_cover_detection_challenge_v1_test_source.tar.gz -C /home/floddo/cloud_coverage_TOELT_SUPSI/Data/S2_mlhub
+# tar zxvf ref_cloud_cover_detection_challenge_v1_test_source.tar.gz -C ../Data/S2_mlhub
# rm ref_cloud_cover_detection_challenge_v1_test_source.tar.gz
wget https://mlhub.earth/archive/ref_cloud_cover_detection_challenge_v1_test_labels
-# tar zxvf ref_cloud_cover_detection_challenge_v1_test_labels.tar.gz -C /home/floddo/cloud_coverage_TOELT_SUPSI/Data/S2_mlhub
+# tar zxvf ref_cloud_cover_detection_challenge_v1_test_labels.tar.gz -C ../Data/S2_mlhub
# rm ref_cloud_cover_detection_challenge_v1_test_labels.tar.gz
src/download_kaggle_95Clouds.sh: 5 changes (2 additions, 3 deletions)
@@ -2,6 +2,5 @@
# Download the dataset from Kaggle

# kaggle datasets -h
-kaggle datasets download -d sorour/38cloud-cloud-segmentation-in-satellite-images -p /home/floddo/cloud_coverage_TOELT_SUPSI/Data/L8_95Cloud --unzip
-kaggle datasets download -d sorour/95cloud-cloud-segmentation-on-satellite-images -p /home/floddo/cloud_coverage_TOELT_SUPSI/Data/L8_95Cloud --unzip
-# ls /home/floddo/cloud_coverage_TOELT_SUPSI/Data/L8_95Cloud
+kaggle datasets download -d sorour/38cloud-cloud-segmentation-in-satellite-images -p ../Data/L8_95Cloud --unzip
+kaggle datasets download -d sorour/95cloud-cloud-segmentation-on-satellite-images -p ../Data/L8_95Cloud --unzip
