# Datasets:
# Modalities: Geospatial
# License:
# File size: 9,070 Bytes
# Revision: 82421a4
import os
import zipfile
import geopandas as gpd
import pandas as pd
from tqdm import tqdm # For progress bars
import warnings
import multiprocessing as mp
import sys # Import the sys module
# Silence known-noisy RuntimeWarnings emitted while reading GML files:
# geopandas forwards a DRIVER open option that the GML driver ignores,
warnings.filterwarnings("ignore", category=RuntimeWarning,
                        message="driver GML does not support open option DRIVER")
# and the cadastral GML data frequently contains unclosed polygon rings.
warnings.filterwarnings("ignore", category=RuntimeWarning,
                        message="Non closed ring detected. To avoid accepting it, set the OGR_GEOMETRY_ACCEPT_UNCLOSED_RING configuration option to NO")
def _load_gml(commune_zip, gml_name, commune_zip_name, kind):
    """Read one GML member of a commune zip as a GeoDataFrame.

    Returns the GeoDataFrame, or None if reading fails (the error is printed,
    mirroring the script's print-and-continue error style).
    """
    try:
        return gpd.read_file(commune_zip.open(gml_name), driver='GML')
    except Exception as e:
        print(f"Error reading {kind} GML {gml_name} from {commune_zip_name}: {e}")
        return None


def _collect_from_city(city_zip, all_communes, all_parcels):
    """Walk the commune zips nested inside a city zip, appending any
    commune boundary ('_map.gml') and parcel ('_ple.gml') frames found."""
    commune_zip_names = [f.filename for f in city_zip.filelist
                         if f.filename.endswith('.zip')]
    for commune_zip_name in commune_zip_names:
        try:
            with zipfile.ZipFile(city_zip.open(commune_zip_name), 'r') as commune_zip:
                gml_files = [f.filename for f in commune_zip.filelist
                             if f.filename.endswith('.gml')]
                commune_gml = next((f for f in gml_files if '_map.gml' in f), None)
                parcel_gml = next((f for f in gml_files if '_ple.gml' in f), None)
                if commune_gml:
                    gdf = _load_gml(commune_zip, commune_gml, commune_zip_name, 'commune')
                    if gdf is not None:
                        all_communes.append(gdf)
                if parcel_gml:
                    gdf = _load_gml(commune_zip, parcel_gml, commune_zip_name, 'parcel')
                    if gdf is not None:
                        all_parcels.append(gdf)
        except zipfile.BadZipFile as e:
            print(f"Bad Zip file encountered: {commune_zip_name} - {e}")
        except Exception as e:
            print(f"Error processing {commune_zip_name}: {e}")


def _normalize_columns(gdf):
    """Coerce every non-geometry column to numeric where possible; columns
    that cannot be fully converted are stringified so they serialize cleanly
    to Parquet (mixed-dtype object columns would otherwise fail)."""
    problem_columns = []
    for col in gdf.columns:
        if col != 'geometry':
            try:
                gdf[col] = pd.to_numeric(gdf[col], errors='raise')
            except (ValueError, TypeError):
                problem_columns.append(col)
    for col in problem_columns:
        gdf[col] = gdf[col].astype(str)


def _save_layer(frames, region_name, output_dir, layer):
    """Concatenate a list of GeoDataFrames and write one gzip GeoParquet file.

    Parameters
    ----------
    frames : list[GeoDataFrame]
        Non-empty list of frames read from GML for this layer.
    region_name : str
        Region identifier used in the output filename.
    output_dir : str
        Destination directory (assumed to exist).
    layer : str
        'communes' or 'parcels'; also names the output file.
    """
    # layer[:-1] turns 'communes'/'parcels' into the singular used in warnings.
    singular = layer[:-1]
    gdf = gpd.GeoDataFrame(pd.concat(frames, ignore_index=True))
    # Propagate the CRS from the first input frame when one is present.
    if hasattr(frames[0], 'crs') and frames[0].crs:
        try:
            gdf.crs = frames[0].crs
        except AttributeError as e:
            print(f"Could not set CRS: {e}")
    else:
        print("WARNING: CRS information is missing from the input data.")
    _normalize_columns(gdf)
    # MapServer-produced GML names its geometry column 'msGeometry'.
    if 'msGeometry' in gdf.columns:
        gdf = gdf.set_geometry('msGeometry')
    elif 'geometry' in gdf.columns:
        gdf = gdf.set_geometry('geometry')  # already the default, but explicit
    else:
        print(f"WARNING: No 'geometry' or 'msGeometry' column found in "
              f"{singular} data. Spatial operations will not work.")
    out_path = os.path.join(output_dir, f"{region_name}_{layer}.geoparquet")
    gdf.to_parquet(out_path, compression='gzip')
    print(f"Successfully saved {region_name} {layer} to {out_path}")


def process_region(region_zip_path, output_dir):
    """Process a single region zip file to extract and save commune and parcel data.

    The region archive nests city zips, which nest commune zips, which contain
    the GML layers. All errors are printed and processing continues; the
    function always returns None.

    Parameters
    ----------
    region_zip_path : str
        Path to one region's zip archive.
    output_dir : str
        Directory (assumed to exist) that receives the GeoParquet outputs.
    """
    region_name = os.path.basename(region_zip_path).replace(".zip", "")
    all_communes = []
    all_parcels = []
    try:
        with zipfile.ZipFile(region_zip_path, 'r') as region_zip:
            city_zip_names = [f.filename for f in region_zip.filelist
                              if f.filename.endswith('.zip')]
            for city_zip_name in city_zip_names:
                try:
                    with zipfile.ZipFile(region_zip.open(city_zip_name), 'r') as city_zip:
                        _collect_from_city(city_zip, all_communes, all_parcels)
                except zipfile.BadZipFile as e:
                    print(f"Bad Zip file encountered: {city_zip_name} - {e}")
                except Exception as e:
                    print(f"Error processing {city_zip_name}: {e}")
    except zipfile.BadZipFile as e:
        # BUG FIX: these two handlers referenced the undefined name
        # 'region_zip_name', so any region-level failure raised NameError.
        print(f"Bad Zip file encountered: {region_zip_path} - {e}")
    except Exception as e:
        print(f"Error processing {region_zip_path}: {e}")
    # Concatenate and save per-layer results for the region.
    try:
        if all_communes:
            _save_layer(all_communes, region_name, output_dir, 'communes')
        if all_parcels:
            _save_layer(all_parcels, region_name, output_dir, 'parcels')
    except Exception as e:
        print(f"Error saving GeoParquet files for {region_name}: {e}")
def _process_region_args(args):
    """Adapter unpacking a (region_zip_path, output_dir) tuple so process_region
    can be driven by Pool.imap_unordered (which passes a single argument)."""
    return process_region(*args)


def process_italy_data_unzipped_parallel(root_dir, output_dir, num_processes=None):
    """Process every region zip under root_dir in parallel.

    Parameters
    ----------
    root_dir : str
        Directory containing one zip archive per region.
    output_dir : str
        Directory for the GeoParquet outputs (created if missing).
    num_processes : int, optional
        Worker-pool size; defaults to the CPU count of the running machine.

    Returns
    -------
    list
        One entry per region (the return value of process_region).
    """
    if num_processes is None:
        # Resolved at call time, not in the signature, so the default reflects
        # the machine the function actually runs on.
        num_processes = mp.cpu_count()
    os.makedirs(output_dir, exist_ok=True)
    region_zip_paths = [os.path.join(root_dir, f)
                        for f in os.listdir(root_dir) if f.endswith('.zip')]
    tasks = [(path, output_dir) for path in region_zip_paths]
    # BUG FIX: the original wrapped the pool in an `if __name__ == '__main__'`
    # guard *inside* this function, so calling it from an importing module
    # silently did nothing. The guard belongs at the script entry point below.
    with mp.Pool(processes=num_processes) as pool:
        # imap_unordered yields as each region finishes, so tqdm shows real
        # incremental progress (starmap blocks until all results are ready).
        return list(tqdm(pool.imap_unordered(_process_region_args, tasks),
                         total=len(region_zip_paths),
                         desc="Overall Progress: Regions"))


# Example usage — guarded so multiprocessing workers re-importing this module
# (spawn start method) do not recursively launch pools.
if __name__ == '__main__':
    if sys.platform == 'darwin':
        # On macOS, 'spawn' avoids fork-safety issues with GDAL/GEOS.
        try:
            mp.set_start_method('spawn')
        except RuntimeError:
            pass  # start method was already set in this interpreter
    root_dir = "ITALIA"    # Path to the ITALIA directory of region zips
    output_dir = "output"  # Path to save the GeoParquet files
    process_italy_data_unzipped_parallel(root_dir, output_dir)