Colocating Sentinel-3 OLCI/SRAL and Sentinel-2 Optical Data#
In this section, we embark on a detailed exploration of colocating Sentinel-3 data with Sentinel-2 optical data. Colocation of data from these two satellite missions enables a powerful synergy, harnessing the high spatial resolution of Sentinel-2 and the comprehensive coverage and colocated altimeter data from Sentinel-3. This fusion of datasets provides a richer, more detailed perspective of Earth’s surface.
In the following sections, we will guide you through the necessary steps to identify and align these datasets.
Week 4 Materials are available here.
Step 0: Read in Functions Needed#
To streamline our data fetching and processing, we’ll first load the essential functions. These functions are identical to what we have for the data_fetching notebook in week 3. These functions essentially help you get metadata for the 2 satellites you care about.
# Mount Google Drive so metadata and downloaded products persist beyond the
# Colab session (only works inside Google Colab).
from google.colab import drive
drive.mount('/content/drive')
Mounted at /content/drive
import ast
import json
import os
from datetime import datetime, timedelta
from xml.etree import ElementTree as ET

import folium
import numpy as np
import pandas as pd
import requests
from shapely.geometry import Polygon, Point, shape
def make_api_request(url, method="GET", data=None, headers=None):
    """
    Perform an authenticated request against the Copernicus Data Space API,
    transparently refreshing the access token once on a 401/403 response.

    Parameters:
        url (str): Full request URL.
        method (str): HTTP method (default "GET").
        data (dict | None): JSON payload sent with the request, if any.
        headers (dict | None): Optional headers; when omitted, an
            Authorization header is built from the global access_token.

    Returns:
        requests.Response: The (possibly retried) HTTP response.
    """
    global access_token
    if not headers:
        headers = {"Authorization": f"Bearer {access_token}"}
    else:
        # Copy so the retry below does not mutate the caller's dict in place.
        headers = dict(headers)
    response = requests.request(method, url, json=data, headers=headers)
    if response.status_code in [401, 403]:
        # Token likely expired: refresh it and retry the request once.
        global refresh_token
        access_token = refresh_access_token(refresh_token)
        headers["Authorization"] = f"Bearer {access_token}"
        response = requests.request(method, url, json=data, headers=headers)
    return response
def query_sentinel3_olci_arctic_data(start_date, end_date, token):
    """
    Queries Sentinel-3 OLCI (OL_1_EFR___) products within a specified time
    range from the Copernicus Data Space, targeting data collected over the
    Arctic region, following pagination links until all results are fetched.

    Parameters:
        start_date (str): Start date in 'YYYY-MM-DD' format.
        end_date (str): End date in 'YYYY-MM-DD' format.
        token (str): Access token for authentication.

    Returns:
        DataFrame: Contains details about the Sentinel-3 OLCI images.
    """
    all_data = []
    # Everything north of 60N (WKT polygon, lon/lat order).
    arctic_polygon = "POLYGON((-180 60, 180 60, 180 90, -180 90, -180 60))"
    filter_string = (
        f"Collection/Name eq 'SENTINEL-3' and "
        f"Attributes/OData.CSC.StringAttribute/any(att:att/Name eq 'productType' and att/Value eq 'OL_1_EFR___') and "
        f"ContentDate/Start gt {start_date}T00:00:00.000Z and ContentDate/Start lt {end_date}T23:59:59.999Z"
    )
    next_url = (
        f"https://catalogue.dataspace.copernicus.eu/odata/v1/Products?"
        f"$filter={filter_string} and "
        f"OData.CSC.Intersects(area=geography'SRID=4326;{arctic_polygon}')&"
        f"$top=1000"
    )
    headers = {"Authorization": f"Bearer {token}"}
    while next_url:
        response = make_api_request(next_url, headers=headers)
        if response.status_code == 200:
            payload = response.json()  # parse the body once per page
            all_data.extend(payload["value"])
            # The catalogue paginates via @odata.nextLink; None ends the loop.
            next_url = payload.get("@odata.nextLink")
        else:
            print(f"Error fetching data: {response.status_code} - {response.text}")
            break
    return pd.DataFrame(all_data)
def get_access_and_refresh_token(username, password):
    """Authenticate against the CDSE identity service and return the pair
    (access_token, refresh_token)."""
    token_url = "https://identity.dataspace.copernicus.eu/auth/realms/CDSE/protocol/openid-connect/token"
    payload = {
        "grant_type": "password",
        "username": username,
        "password": password,
        "client_id": "cdse-public",
    }
    resp = requests.post(token_url, data=payload)
    resp.raise_for_status()
    body = resp.json()
    return body["access_token"], body["refresh_token"]
def refresh_access_token(refresh_token):
    """
    Attempt to refresh the access token using the refresh token.

    Parameters:
        refresh_token (str): Refresh token obtained at authentication time.

    Returns:
        str: A fresh access token.

    Raises:
        RuntimeError: If the refresh token itself is rejected (HTTP 400);
            the caller must re-authenticate with username/password.
        requests.exceptions.HTTPError: For any other HTTP failure.
    """
    url = "https://identity.dataspace.copernicus.eu/auth/realms/CDSE/protocol/openid-connect/token"
    data = {
        "grant_type": "refresh_token",
        "refresh_token": refresh_token,
        "client_id": "cdse-public",
    }
    headers = {"Content-Type": "application/x-www-form-urlencoded"}
    try:
        response = requests.post(url, headers=headers, data=data)
        response.raise_for_status()  # This will throw an error for non-2xx responses
        return response.json()["access_token"]
    except requests.exceptions.HTTPError as e:
        print(f"Failed to refresh token: {e.response.status_code} - {e.response.text}")
        if e.response.status_code == 400:
            # The previous version tried to re-authenticate here, but it read
            # undefined local `username`/`password` variables and would have
            # crashed with UnboundLocalError. Credentials are not available in
            # this scope, so surface a clear, actionable error instead.
            raise RuntimeError(
                "Refresh token invalid; re-authenticate with "
                "get_access_and_refresh_token(username, password)."
            ) from e
        raise
def download_single_product(
    product_id, file_name, access_token, download_dir="downloaded_products"
):
    """
    Download a single product from the Copernicus Data Space.

    :param product_id: The unique identifier for the product.
    :param file_name: The name of the file to be downloaded (".zip" appended).
    :param access_token: The access token for authorization.
    :param download_dir: The directory where the product will be saved.
    """
    # Ensure the download directory exists
    os.makedirs(download_dir, exist_ok=True)

    # Construct the download URL
    url = (
        f"https://zipper.dataspace.copernicus.eu/odata/v1/Products({product_id})/$value"
    )

    # Use the session as a context manager so its connections are closed, and
    # set the Authorization header once on the session (the original also
    # passed it redundantly per-request).
    with requests.Session() as session:
        session.headers.update({"Authorization": f"Bearer {access_token}"})
        # stream=True writes large archives in chunks instead of buffering
        # the whole body in memory.
        response = session.get(url, stream=True)
        if response.status_code == 200:
            output_file_path = os.path.join(download_dir, file_name + ".zip")
            with open(output_file_path, "wb") as file:
                for chunk in response.iter_content(chunk_size=8192):
                    if chunk:
                        file.write(chunk)
            print(f"Downloaded: {output_file_path}")
        else:
            print(
                f"Failed to download product {product_id}. Status Code: {response.status_code}"
            )
def query_sentinel3_sral_arctic_data(start_date, end_date, token):
    """
    Queries Sentinel-3 SRAL (SR_2_LAN_SI) products within a specified time
    range from the Copernicus Data Space, targeting a small region in the
    Arctic, following pagination links until all results are fetched.

    Parameters:
        start_date (str): Start date in 'YYYY-MM-DD' format.
        end_date (str): End date in 'YYYY-MM-DD' format.
        token (str): Access token for authentication.

    Returns:
        DataFrame: Contains details about the Sentinel-3 SRAL images.
    """
    all_data = []
    # Small area of interest (approx. 75-82W, 71.7-73.8N; WKT, lon/lat order).
    arctic_polygon = (
        "POLYGON ((-81.7 71.7, -81.7 73.8, -75.1 73.8, -75.1 71.7, -81.7 71.7))"
    )
    filter_string = (
        f"Collection/Name eq 'SENTINEL-3' and "
        f"Attributes/OData.CSC.StringAttribute/any(att:att/Name eq 'productType' and att/Value eq 'SR_2_LAN_SI') and "
        f"ContentDate/Start gt {start_date}T00:00:00.000Z and ContentDate/Start lt {end_date}T23:59:59.999Z"
    )
    next_url = (
        f"https://catalogue.dataspace.copernicus.eu/odata/v1/Products?"
        f"$filter={filter_string} and "
        f"OData.CSC.Intersects(area=geography'SRID=4326;{arctic_polygon}')&"
        f"$top=1000"
    )
    # Build the auth header once and reuse it (the original rebuilt it inline
    # on every request and left this variable unused).
    headers = {"Authorization": f"Bearer {token}"}
    while next_url:
        response = make_api_request(next_url, headers=headers)
        if response.status_code == 200:
            payload = response.json()  # parse the body once per page
            all_data.extend(payload["value"])
            next_url = payload.get("@odata.nextLink")
        else:
            print(f"Error fetching data: {response.status_code} - {response.text}")
            break
    return pd.DataFrame(all_data)
def query_sentinel2_arctic_data(
    start_date,
    end_date,
    token,
    min_cloud_percentage=10,
    max_cloud_percentage=50,
):
    """
    Queries Sentinel-2 data within a specified time range from the Copernicus
    Data Space, considering a range of cloud coverage by treating the
    greater-than and less-than conditions as separate attribute filters.
    Handles pagination to fetch all available data.

    Parameters:
        start_date (str): Start date in 'YYYY-MM-DD' format.
        end_date (str): End date in 'YYYY-MM-DD' format.
        token (str): Access token for authentication.
        min_cloud_percentage (int): Minimum allowed cloud coverage.
        max_cloud_percentage (int): Maximum allowed cloud coverage.

    Returns:
        DataFrame: Contains details about the Sentinel-2 images.
    """
    all_data = []
    # Everything north of 60N (WKT polygon, lon/lat order).
    arctic_polygon = "POLYGON((-180 60, 180 60, 180 90, -180 90, -180 60))"
    filter_string = (
        f"Collection/Name eq 'SENTINEL-2' and "
        f"Attributes/OData.CSC.DoubleAttribute/any(att:att/Name eq 'cloudCover' and att/Value ge {min_cloud_percentage}) and "
        f"Attributes/OData.CSC.DoubleAttribute/any(att:att/Name eq 'cloudCover' and att/Value le {max_cloud_percentage}) and "
        f"ContentDate/Start gt {start_date}T00:00:00.000Z and ContentDate/Start lt {end_date}T23:59:59.999Z"
    )
    next_url = (
        f"https://catalogue.dataspace.copernicus.eu/odata/v1/Products?"
        f"$filter={filter_string} and "
        f"OData.CSC.Intersects(area=geography'SRID=4326;{arctic_polygon}')&"
        f"$top=1000"
    )
    # Build the auth header once and reuse it (the original rebuilt it inline
    # on every request and left this variable unused).
    headers = {"Authorization": f"Bearer {token}"}
    while next_url:
        response = make_api_request(next_url, headers=headers)
        if response.status_code == 200:
            payload = response.json()  # parse the body once per page
            all_data.extend(payload["value"])
            next_url = payload.get("@odata.nextLink")
        else:
            print(f"Error fetching data: {response.status_code} - {response.text}")
            break
    return pd.DataFrame(all_data)
def plot_results(results):
    """Render each collocated footprint pair as GeoJSON layers on an
    interactive folium world map and return the map object."""
    fmap = folium.Map(location=[0, 0], zoom_start=2)
    for _, record in results.iterrows():
        try:
            # Footprints are stored single-quoted; normalise to valid JSON.
            fp1 = json.loads(record["Satellite1_Footprint"].replace("'", '"'))
            fp2 = json.loads(record["Satellite2_Footprint"].replace("'", '"'))
        except json.JSONDecodeError as err:
            print(f"Error decoding JSON: {err}")
            continue
        folium.GeoJson(fp1, name=record["Satellite1_Name"]).add_to(fmap)
        folium.GeoJson(fp2, name=record["Satellite2_Name"]).add_to(fmap)
    folium.LayerControl().add_to(fmap)
    return fmap
def parse_geofootprint(footprint):
    """
    Parse a JSON-like footprint string (single-quoted) into a Shapely
    geometry. Returns None when the string is not valid JSON after quote
    normalisation.
    """
    normalized = footprint.replace("'", '"')
    try:
        return shape(json.loads(normalized))
    except json.JSONDecodeError:
        return None
def check_collocation(
    df1, df2, start_date, end_date, time_window=pd.to_timedelta("1 day")
):
    """
    Find spatio-temporally collocated product pairs between two metadata
    DataFrames.

    Both DataFrames must carry 'GeoFootprint', 'ContentDate.Start',
    'ContentDate.End', 'Name' and 'Id' columns. A pair is collocated when the
    df1 acquisition window, widened by +/- time_window, overlaps the df2
    window AND their geo-footprints intersect.

    Parameters:
        df1, df2 (DataFrame): Product metadata tables to compare.
        start_date, end_date: Overall period of interest; products entirely
            outside it are skipped.
        time_window (Timedelta): Temporal tolerance applied to df1 windows.

    Returns:
        DataFrame: One row per collocated pair with names, IDs, footprints
        and the overlap interval as ISO strings.
    """
    collocated = []
    start_date = pd.to_datetime(start_date)
    end_date = pd.to_datetime(end_date)

    # Pre-parse and pre-filter df2 ONCE. The previous version re-parsed every
    # df2 footprint and re-checked its dates inside the df1 loop, doing
    # O(len(df1) * len(df2)) JSON/geometry parsing for no benefit.
    df2_entries = []
    for idx2, row2 in df2.iterrows():
        footprint2 = parse_geofootprint(row2["GeoFootprint"])
        if footprint2 is None:
            continue
        s2_start = row2["ContentDate.Start"]
        s2_end = row2["ContentDate.End"]
        if s2_end < start_date or s2_start > end_date:
            continue
        df2_entries.append((row2, footprint2, s2_start, s2_end))

    for idx1, row1 in df1.iterrows():
        footprint1 = parse_geofootprint(row1["GeoFootprint"])
        if footprint1 is None:
            continue
        s1_start = row1["ContentDate.Start"]
        s1_end = row1["ContentDate.End"]
        if s1_end < start_date or s1_start > end_date:
            continue
        s1_start_adjusted = s1_start - time_window
        s1_end_adjusted = s1_end + time_window
        for row2, footprint2, s2_start, s2_end in df2_entries:
            # Cheap temporal-overlap test first, geometric test second.
            if max(s1_start_adjusted, s2_start) <= min(s1_end_adjusted, s2_end):
                if footprint1.intersects(footprint2):
                    collocated.append(
                        {
                            "Satellite1_Name": row1["Name"],
                            "Satellite1_ID": row1["Id"],
                            "Satellite1_Footprint": row1["GeoFootprint"],
                            "Satellite2_Name": row2["Name"],
                            "Satellite2_ID": row2["Id"],
                            "Satellite2_Footprint": row2["GeoFootprint"],
                            "Overlap_Start": max(
                                s1_start_adjusted, s2_start
                            ).isoformat(),
                            "Overlap_End": min(s1_end_adjusted, s2_end).isoformat(),
                        }
                    )
    return pd.DataFrame(collocated)
def make_timezone_naive(dt):
    """Drop the timezone from a timezone-aware datetime, yielding a naive one."""
    naive = dt.replace(tzinfo=None)
    return naive
Step 1: Get the Metadata for satellites (Sentinel-2 and Sentinel-3 OLCI in this case)#
In this example, we illustrate how we co-locate Sentinel-2 and Sentinel-3 OLCI by fetching the metadata first (the same way we did in week 3). Since we are trying to find co-location between 2 satellites, we fetch 2 tables of metadata, representing 2 satellites we care about. In this case, they are named as object ‘sentinel3_olci_data’ and ‘sentinel2_data’.
# Copernicus Data Space credentials (fill these in before running).
username = ""
password = ""
access_token, refresh_token = get_access_and_refresh_token(username, password)
# Query window shared by both missions.
start_date = "2018-06-01"
end_date = "2018-06-02"
path_to_save_data = "/content/drive/MyDrive/GEOL0069/2425/Week 4/" # Here you can edit where you want to save your metadata
# Fetch Sentinel-3 OLCI metadata over the Arctic for the window.
s3_olci_metadata = query_sentinel3_olci_arctic_data(
    start_date, end_date, access_token
)
# Fetch near-cloud-free (0-10% cloud cover) Sentinel-2 metadata.
s2_metadata = query_sentinel2_arctic_data(
    start_date,
    end_date,
    access_token,
    min_cloud_percentage=0,
    max_cloud_percentage=10,
)
# You can also save the metadata to CSV so later steps can reload it.
s3_olci_metadata.to_csv(
    path_to_save_data+"sentinel3_olci_metadata.csv",
    index=False,
)
s2_metadata.to_csv(
    path_to_save_data+"sentinel2_metadata.csv",
    index=False,
)
You can try to print them to see what these 2 metadata look like.
# Inspect the Sentinel-3 OLCI metadata table.
from IPython.display import display
display(s3_olci_metadata)
@odata.mediaContentType | Id | Name | ContentType | ContentLength | OriginDate | PublicationDate | ModificationDate | Online | EvictionDate | S3Path | Checksum | ContentDate | Footprint | GeoFootprint | |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0 | application/octet-stream | 9db390cf-cfb4-5112-a1ee-8e5f2707401e | S3A_OL_1_EFR____20180601T151119_20180601T15141... | application/octet-stream | 665998933 | 2018-11-01T18:21:34.729000Z | 2022-05-27T11:46:15.232535Z | 2023-12-07T10:40:04.296086Z | True | 9999-12-31T23:59:59.999999Z | /eodata/Sentinel-3/OLCI/OL_1_EFR/2018/06/01/S3... | [{'Value': 'c4196f6241364e344781a6293ee6b664',... | {'Start': '2018-06-01T15:11:19.387000Z', 'End'... | geography'SRID=4326;POLYGON ((-80.6219 84.2705... | {'type': 'Polygon', 'coordinates': [[[-80.6219... |
1 | application/octet-stream | ee09722f-66c9-52b9-8bbd-0363cf04c922 | S3A_OL_1_EFR____20180601T114621_20180601T11492... | application/octet-stream | 0 | 2018-11-01T18:19:53.658000Z | 2018-06-19T00:00:39.937000Z | 2018-06-19T00:00:39.937000Z | True | 9999-12-31T23:59:59.999999Z | /eodata/Sentinel-3/OLCI/OL_1_EFR/2018/06/01/S3... | [] | {'Start': '2018-06-01T11:46:20.946064Z', 'End'... | geography'SRID=4326;POLYGON ((155.003 85.3064,... | {'type': 'Polygon', 'coordinates': [[[155.003,... |
2 | application/octet-stream | c10d9364-e17b-5116-9a69-d3a2b0da6290 | S3A_OL_1_EFR____20180601T234515_20180601T23481... | application/octet-stream | 0 | 2018-11-01T18:25:20.964000Z | 2018-06-03T06:21:12.283000Z | 2018-06-03T06:21:12.283000Z | True | 9999-12-31T23:59:59.999999Z | /eodata/Sentinel-3/OLCI/OL_1_EFR/2018/06/01/S3... | [] | {'Start': '2018-06-01T23:45:15.491665Z', 'End'... | geography'SRID=4326;POLYGON ((149.792 52.9492,... | {'type': 'Polygon', 'coordinates': [[[149.792,... |
3 | application/octet-stream | 782b7081-7099-50eb-8d70-02e9f28c3659 | S3A_OL_1_EFR____20180602T030414_20180602T03071... | application/octet-stream | 0 | 2018-11-01T18:27:26.718000Z | 2018-06-05T00:44:24.893000Z | 2018-06-05T00:44:24.893000Z | True | 9999-12-31T23:59:59.999999Z | /eodata/Sentinel-3/OLCI/OL_1_EFR/2018/06/02/S3... | [] | {'Start': '2018-06-02T03:04:13.933307Z', 'End'... | geography'SRID=4326;POLYGON ((100.81 63.41, 10... | {'type': 'Polygon', 'coordinates': [[[100.81, ... |
4 | application/octet-stream | 89a02158-6ce3-51f0-a962-578c0cfc3061 | S3A_OL_1_EFR____20180602T212905_20180602T21320... | application/octet-stream | 675502028 | 2018-10-28T21:39:59.997000Z | 2022-05-27T11:46:17.436495Z | 2023-12-07T10:40:55.357408Z | True | 9999-12-31T23:59:59.999999Z | /eodata/Sentinel-3/OLCI/OL_1_EFR/2018/06/02/S3... | [{'Value': '37ea08b9364f92107c62a9a8f9eb84bf',... | {'Start': '2018-06-02T21:29:05.362000Z', 'End'... | geography'SRID=4326;POLYGON ((-175.044 84.2832... | {'type': 'Polygon', 'coordinates': [[[-175.044... |
... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
337 | application/octet-stream | 2ea51bd6-a5f9-5776-8245-47e577ed51ab | S3B_OL_1_EFR____20180602T230525_20180602T23061... | application/octet-stream | 0 | 2020-05-04T15:04:29.740000Z | 2020-05-04T16:12:07.135006Z | 2020-05-04T16:12:07.135006Z | True | 9999-12-31T23:59:59.999999Z | /eodata/Sentinel-3/OLCI/OL_1_EFR/2018/06/02/S3... | [] | {'Start': '2018-06-02T23:05:24.783000Z', 'End'... | geography'SRID=4326;POLYGON ((-14.9877 75.3681... | {'type': 'Polygon', 'coordinates': [[[-14.9877... |
338 | application/octet-stream | 16ab2ce3-f5e8-5a02-ab83-ac5cdc858a93 | S3B_OL_1_EFR____20180602T230619_20180602T23091... | application/octet-stream | 0 | 2020-05-04T15:04:35.938000Z | 2020-05-04T16:12:35.026723Z | 2020-05-04T16:12:35.026723Z | True | 9999-12-31T23:59:59.999999Z | /eodata/Sentinel-3/OLCI/OL_1_EFR/2018/06/02/S3... | [] | {'Start': '2018-06-02T23:06:18.951000Z', 'End'... | geography'SRID=4326;POLYGON ((-15.117827625966... | {'type': 'Polygon', 'coordinates': [[[-15.1178... |
339 | application/octet-stream | 2f363e42-e61c-59a4-a51f-f7ccacbc3da4 | S3B_OL_1_EFR____20180602T231219_20180602T23151... | application/octet-stream | 0 | 2020-05-04T14:41:59.925000Z | 2020-05-04T16:12:41.395265Z | 2020-05-04T16:12:41.395265Z | True | 9999-12-31T23:59:59.999999Z | /eodata/Sentinel-3/OLCI/OL_1_EFR/2018/06/02/S3... | [] | {'Start': '2018-06-02T23:12:18.951000Z', 'End'... | geography'SRID=4326;MULTIPOLYGON (((180 72.052... | {'type': 'MultiPolygon', 'coordinates': [[[[18... |
340 | application/octet-stream | be1ab892-3859-5cf6-a734-6ec0b2eb91e8 | S3B_OL_1_EFR____20180602T231519_20180602T23181... | application/octet-stream | 0 | 2020-05-04T15:04:42.445000Z | 2020-05-04T16:12:53.766136Z | 2020-05-04T16:12:53.766136Z | True | 9999-12-31T23:59:59.999999Z | /eodata/Sentinel-3/OLCI/OL_1_EFR/2018/06/02/S3... | [] | {'Start': '2018-06-02T23:15:18.951000Z', 'End'... | geography'SRID=4326;MULTIPOLYGON (((180 60.384... | {'type': 'MultiPolygon', 'coordinates': [[[[18... |
341 | application/octet-stream | b0e85f79-879d-5bf7-9fca-38183beabc8e | S3B_OL_1_EFR____20180602T230919_20180602T23121... | application/octet-stream | 0 | 2020-05-04T15:01:01.283000Z | 2021-06-09T09:36:33.879530Z | 2021-06-09T09:36:33.879530Z | True | 9999-12-31T23:59:59.999999Z | /eodata/Sentinel-3/OLCI/OL_1_EFR/2018/06/02/S3... | [] | {'Start': '2018-06-02T23:09:18.951000Z', 'End'... | geography'SRID=4326;MULTIPOLYGON (((159.995083... | {'type': 'MultiPolygon', 'coordinates': [[[[15... |
342 rows × 15 columns
# Inspect the Sentinel-2 metadata table.
from IPython.display import display
display(s2_metadata)
@odata.mediaContentType | Id | Name | ContentType | ContentLength | OriginDate | PublicationDate | ModificationDate | Online | EvictionDate | S3Path | Checksum | ContentDate | Footprint | GeoFootprint | |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0 | application/octet-stream | 5d10f832-4996-4b0f-a68a-d58d1d43c0af | S2B_MSIL1C_20180602T204019_N0500_R014_T11XMB_2... | application/octet-stream | 526323201 | 2024-02-11T20:43:52.658000Z | 2024-02-12T01:18:36.812749Z | 2024-05-10T22:07:05.320823Z | True | 9999-12-31T23:59:59.999999Z | /eodata/Sentinel-2/MSI/L1C_N0500/2018/06/02/S2... | [{'Value': '5a0d07cca2bf18265e9c7b84c64a3b8b',... | {'Start': '2018-06-02T20:40:19.024000Z', 'End'... | geography'SRID=4326;POLYGON ((-116.68701 73.77... | {'type': 'Polygon', 'coordinates': [[[-116.687... |
1 | application/octet-stream | 0200999b-0712-4a70-b7a3-4160e5d4f95b | S2B_MSIL1C_20180601T225529_N0500_R001_T03WXU_2... | application/octet-stream | 660811240 | 2024-02-13T01:12:13.265000Z | 2024-02-13T02:35:07.552178Z | 2024-05-11T15:51:40.346618Z | True | 9999-12-31T23:59:59.999999Z | /eodata/Sentinel-2/MSI/L1C_N0500/2018/06/01/S2... | [{'Value': '8c8ac895d615eadb229d86502cecc0b7',... | {'Start': '2018-06-01T22:55:29.024000Z', 'End'... | geography'SRID=4326;POLYGON ((-162.22142 71.18... | {'type': 'Polygon', 'coordinates': [[[-162.221... |
2 | application/octet-stream | 18acad08-6956-4be5-ae83-4bdc98de0ed4 | S2B_MSIL1C_20180601T225529_N0500_R001_T03WXT_2... | application/octet-stream | 852541420 | 2024-02-13T01:17:09.637000Z | 2024-02-13T02:51:58.264249Z | 2024-05-11T15:52:03.476457Z | True | 9999-12-31T23:59:59.999999Z | /eodata/Sentinel-2/MSI/L1C_N0500/2018/06/01/S2... | [{'Value': '5931cd226656090e0f59791788a9b87f',... | {'Start': '2018-06-01T22:55:29.024000Z', 'End'... | geography'SRID=4326;POLYGON ((-162.343 70.2864... | {'type': 'Polygon', 'coordinates': [[[-162.343... |
3 | application/octet-stream | bdadee59-bb88-4917-b390-b7b5489e3833 | S2A_MSIL2A_20180602T163021_N0500_R083_T24XWP_2... | application/octet-stream | 22280115 | 2024-02-13T01:20:10.983000Z | 2024-02-13T02:52:05.329300Z | 2024-05-11T15:52:03.589105Z | True | 9999-12-31T23:59:59.999999Z | /eodata/Sentinel-2/MSI/L2A_N0500/2018/06/02/S2... | [{'Value': 'd4d998840214282d5d32767c7807534b',... | {'Start': '2018-06-02T16:30:21.024000Z', 'End'... | geography'SRID=4326;POLYGON ((-39.000977 79.32... | {'type': 'Polygon', 'coordinates': [[[-39.0009... |
4 | application/octet-stream | bbefc016-6256-4c03-936a-956b55a8c01c | S2B_MSIL2A_20180601T225529_N0500_R001_T03WWQ_2... | application/octet-stream | 834579698 | 2024-02-13T01:45:33.763000Z | 2024-02-13T03:15:06.336369Z | 2024-05-11T15:52:31.083470Z | True | 9999-12-31T23:59:59.999999Z | /eodata/Sentinel-2/MSI/L2A_N0500/2018/06/01/S2... | [{'Value': '5f32124c8eebc9abc91c054854beab9a',... | {'Start': '2018-06-01T22:55:29.024000Z', 'End'... | geography'SRID=4326;POLYGON ((-162.4922 66.889... | {'type': 'Polygon', 'coordinates': [[[-162.492... |
... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
1831 | application/octet-stream | 104b8330-d69e-4386-a064-ed8ee196ab50 | S2A_MSIL1C_20180601T151911_N0500_R068_T23XPA_2... | application/octet-stream | 472956692 | 2024-02-12T04:22:47.848000Z | 2024-02-12T06:48:36.921539Z | 2024-05-10T22:12:10.018582Z | True | 9999-12-31T23:59:59.999999Z | /eodata/Sentinel-2/MSI/L1C_N0500/2018/06/01/S2... | [{'Value': 'ea86325888acc78d13dd18925d1e4fac',... | {'Start': '2018-06-01T15:19:11.024000Z', 'End'... | geography'SRID=4326;POLYGON ((-38.61331 72.851... | {'type': 'Polygon', 'coordinates': [[[-38.6133... |
1832 | application/octet-stream | e4651090-9523-4f44-a468-b9c3f3a5da68 | S2A_MSIL1C_20180601T151911_N0500_R068_T24XWL_2... | application/octet-stream | 622640858 | 2024-02-12T04:25:08.829000Z | 2024-02-12T06:21:56.790984Z | 2024-05-10T22:11:46.425773Z | True | 9999-12-31T23:59:59.999999Z | /eodata/Sentinel-2/MSI/L1C_N0500/2018/06/01/S2... | [{'Value': '5c2690bfb277f757d8067e9268decb3c',... | {'Start': '2018-06-01T15:19:11.024000Z', 'End'... | geography'SRID=4326;POLYGON ((-39.000824 77.24... | {'type': 'Polygon', 'coordinates': [[[-39.0008... |
1833 | application/octet-stream | 540462fd-b1bd-4f26-b85c-a42094fc4f39 | S2A_MSIL1C_20180601T151911_N0500_R068_T26XMK_2... | application/octet-stream | 401623686 | 2024-02-12T04:22:49.076000Z | 2024-02-12T06:46:56.499748Z | 2024-05-10T22:12:09.531757Z | True | 9999-12-31T23:59:59.999999Z | /eodata/Sentinel-2/MSI/L1C_N0500/2018/06/01/S2... | [{'Value': '888d3fcd13b446beaa2227447d79b5f9',... | {'Start': '2018-06-01T15:19:11.024000Z', 'End'... | geography'SRID=4326;POLYGON ((-26.62741 76.419... | {'type': 'Polygon', 'coordinates': [[[-26.6274... |
1834 | application/octet-stream | 35bd8f3f-2052-4ff1-8c05-6dcd1e75af1a | S2A_MSIL1C_20180601T151911_N0500_R068_T26XNL_2... | application/octet-stream | 467324486 | 2024-02-12T04:22:44.206000Z | 2024-02-12T06:45:49.558232Z | 2024-05-10T22:12:09.097660Z | True | 9999-12-31T23:59:59.999999Z | /eodata/Sentinel-2/MSI/L1C_N0500/2018/06/01/S2... | [{'Value': 'b35b3a24ddad056a9e8e90d109c7f745',... | {'Start': '2018-06-01T15:19:11.024000Z', 'End'... | geography'SRID=4326;POLYGON ((-22.546234 77.21... | {'type': 'Polygon', 'coordinates': [[[-22.5462... |
1835 | application/octet-stream | 3abbc5ca-a6f9-461c-aa5b-cddff969b7f0 | S2A_MSIL2A_20180602T213531_N0500_R086_T05VPG_2... | application/octet-stream | 1102825610 | 2024-02-12T06:29:36.304000Z | 2024-02-12T07:42:22.928112Z | 2024-05-10T22:13:11.950724Z | True | 9999-12-31T23:59:59.999999Z | /eodata/Sentinel-2/MSI/L2A_N0500/2018/06/02/S2... | [{'Value': '2b6857b601c13224335def418ba3bf2f',... | {'Start': '2018-06-02T21:35:31.024000Z', 'End'... | geography'SRID=4326;POLYGON ((-149.22517 60.09... | {'type': 'Polygon', 'coordinates': [[[-149.225... |
1836 rows × 15 columns
From above, we can see that there are 342 rows for S3 OLCI and 1836 rows for S2. Next, we will use this metadata to co-locate them and produce another table showing the details of the co-location pairs.
## Co-locate the data
In this section we use the metadata we have just fetched to generate the co-location pair details. The logic of the code is to match rows from S2 and S3 OLCI by their geo-footprints.
# Reload the saved metadata so this step can run independently of Step 1.
s3_olci_metadata = pd.read_csv(
    path_to_save_data + "sentinel3_olci_metadata.csv"
)
s2_metadata = pd.read_csv(
    path_to_save_data + "sentinel2_metadata.csv"
)

# The 'ContentDate' column holds a dict serialized as a string. Parse it with
# ast.literal_eval (safe for literals) rather than eval(), which would execute
# arbitrary code embedded in the CSV. Then convert to timezone-naive datetimes
# so the comparisons in check_collocation are consistent.
s3_olci_metadata["ContentDate.Start"] = pd.to_datetime(
    s3_olci_metadata["ContentDate"].apply(lambda x: ast.literal_eval(x)["Start"])
).apply(make_timezone_naive)
s3_olci_metadata["ContentDate.End"] = pd.to_datetime(
    s3_olci_metadata["ContentDate"].apply(lambda x: ast.literal_eval(x)["End"])
).apply(make_timezone_naive)
s2_metadata["ContentDate.Start"] = pd.to_datetime(
    s2_metadata["ContentDate"].apply(lambda x: ast.literal_eval(x)["Start"])
).apply(make_timezone_naive)
s2_metadata["ContentDate.End"] = pd.to_datetime(
    s2_metadata["ContentDate"].apply(lambda x: ast.literal_eval(x)["End"])
).apply(make_timezone_naive)

# Pair S2 (Satellite1) with S3 OLCI (Satellite2) within a 10-minute window.
results = check_collocation(
    s2_metadata, s3_olci_metadata, start_date, end_date, time_window=pd.to_timedelta("10 minutes")
)
As usual, you can have a look at the co-location output
# Show the first five co-location pairs.
from IPython.display import display
display(results.head(5))
Satellite1_Name | Satellite1_ID | Satellite1_Footprint | Satellite2_Name | Satellite2_ID | Satellite2_Footprint | Overlap_Start | Overlap_End | |
---|---|---|---|---|---|---|---|---|
0 | S2A_MSIL1C_20180601T201851_N0500_R071_T16XER_2... | 060f93fd-3342-47cd-839e-95c837de5fd2 | {'type': 'Polygon', 'coordinates': [[[-87.0012... | S3A_OL_1_EFR____20180601T201417_20180601T20171... | 58c1eed1-098c-5071-8fa3-568f5b090a53 | {'type': 'Polygon', 'coordinates': [[[-156.343... | 2018-06-01T20:14:17.050000 | 2018-06-01T20:17:17.050000 |
1 | S2A_MSIL1C_20180601T201851_N0500_R071_T16XER_2... | 060f93fd-3342-47cd-839e-95c837de5fd2 | {'type': 'Polygon', 'coordinates': [[[-87.0012... | S3B_OL_1_EFR____20180601T201324_20180601T20162... | 1e6552b0-0ca1-5214-b28a-8bb9f64ad19b | {'type': 'Polygon', 'coordinates': [[[-156.033... | 2018-06-01T20:13:24.146000 | 2018-06-01T20:16:24.146000 |
2 | S2A_MSIL2A_20180601T201851_N0500_R071_T11WNV_2... | 98cfb2f7-6d25-46ce-a68e-aeeee205ed22 | {'type': 'Polygon', 'coordinates': [[[-115.905... | S3A_OL_1_EFR____20180601T201717_20180601T20201... | 747af4a2-ccd0-5fc0-b46d-129267a18be7 | {'type': 'Polygon', 'coordinates': [[[-156.854... | 2018-06-01T20:17:17.050050 | 2018-06-01T20:20:17.050050 |
3 | S2A_MSIL2A_20180601T201851_N0500_R071_T11WNV_2... | 98cfb2f7-6d25-46ce-a68e-aeeee205ed22 | {'type': 'Polygon', 'coordinates': [[[-115.905... | S3B_OL_1_EFR____20180601T201624_20180601T20192... | ef311249-6684-5e35-aaa4-4768ab3694cd | {'type': 'Polygon', 'coordinates': [[[-156.735... | 2018-06-01T20:16:24.146000 | 2018-06-01T20:19:24.146000 |
4 | S2A_MSIL2A_20180601T201851_N0500_R071_T16XEN_2... | 7d345969-2abf-4de6-a9f4-3320d76c779b | {'type': 'Polygon', 'coordinates': [[[-81.9366... | S3A_OL_1_EFR____20180601T201417_20180601T20171... | 58c1eed1-098c-5071-8fa3-568f5b090a53 | {'type': 'Polygon', 'coordinates': [[[-156.343... | 2018-06-01T20:14:17.050000 | 2018-06-01T20:17:17.050000 |
With code below, you can visualise the co-located footprint.
# Visualise the first five co-located footprints on an interactive map.
from IPython.display import display
map_result = plot_results(results.head(5))
display(map_result)
Proceeding with Sentinel-3 OLCI Download#
Moving forward, we turn our attention to downloading the Sentinel-3 OLCI data. The process mirrors the approach we took with Sentinel-2, maintaining consistency in our methodology. We’ll apply the same logic of filename conversion and follow the structured steps to retrieve the data from the Copernicus dataspace.
# Download the Sentinel-2 product from the first co-location pair as an example.
download_dir = "" # Replace with your desired download directory
product_id = results['Satellite1_ID'][0] # Replace with your desired file id
file_name = results['Satellite1_Name'][0] # Replace with your desired filename
# Download the single product
download_single_product(product_id, file_name, access_token, download_dir)
## Sentinel-3 SRAL
It is also possible to co-locate S2/S3 OLCI with S3 SRAL (altimetry data). The overall logic is the same, we just need to fetch the S3 SRAL metadata.
# Fetch Sentinel-3 SRAL (altimetry) metadata for the same window and save it.
sentinel3_sral_data = query_sentinel3_sral_arctic_data(
    start_date, end_date, access_token
)
sentinel3_sral_data.to_csv(
    path_to_save_data + "s3_sral_metadata.csv",
    index=False,
)
And now you do the co-location again, for S3 SRAL with S2 for example.
# Reload the saved metadata so this step can run independently.
s3_sral_metadata = pd.read_csv(
    path_to_save_data + "s3_sral_metadata.csv"
)
s2_metadata = pd.read_csv(
    path_to_save_data + "sentinel2_metadata.csv"
)

# As before, parse the serialized 'ContentDate' dicts with ast.literal_eval
# (safe for literals) instead of eval(), and normalise to timezone-naive
# datetimes for check_collocation.
s3_sral_metadata["ContentDate.Start"] = pd.to_datetime(
    s3_sral_metadata["ContentDate"].apply(lambda x: ast.literal_eval(x)["Start"])
).apply(make_timezone_naive)
s3_sral_metadata["ContentDate.End"] = pd.to_datetime(
    s3_sral_metadata["ContentDate"].apply(lambda x: ast.literal_eval(x)["End"])
).apply(make_timezone_naive)
s2_metadata["ContentDate.Start"] = pd.to_datetime(
    s2_metadata["ContentDate"].apply(lambda x: ast.literal_eval(x)["Start"])
).apply(make_timezone_naive)
s2_metadata["ContentDate.End"] = pd.to_datetime(
    s2_metadata["ContentDate"].apply(lambda x: ast.literal_eval(x)["End"])
).apply(make_timezone_naive)

# Pair S2 (Satellite1) with S3 SRAL (Satellite2) within a 10-minute window.
results = check_collocation(
    s2_metadata, s3_sral_metadata, start_date, end_date, time_window=pd.to_timedelta("10 minutes")
)
And now you can plot the co-location results again.
# Visualise the first five S2 / S3 SRAL co-location footprints.
from IPython.display import display
map_result = plot_results(results.head(5))
display(map_result)