diff --git a/.gitignore b/.gitignore
index 8d4c3835ba628fc69e2c8b457f0da53357e56578..5c1a962d4c0272b09145290f6326985ce1fc36ac 100644
--- a/.gitignore
+++ b/.gitignore
@@ -7,3 +7,4 @@ __pycache__
 tests/data*
 draft
 examples/data
+docs/examples
\ No newline at end of file
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 8a244673115225e788553dec6a6e3808721b455a..41bc8c8baeae998a8d10ea57e7e852e25fb69ccc 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -16,6 +16,9 @@ tests:
     - micromamba env list
     - pip install -e .
     - pytest -s
+  rules:
+    # run the tests job for merge request pipelines and for any branch pipeline
+    - if: $CI_PIPELINE_SOURCE == "merge_request_event"
+    - if: $CI_COMMIT_BRANCH =~ /.*/
   
 pages:
   stage: pages
diff --git a/CHANGELOG.md b/CHANGELOG.md
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..6387fd98751302a3760bbcf7e37a29a0241c84c6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -0,0 +1,19 @@
+# v1.1.0
+
+## Add
+
+- function `write_assets`: write item assets (rasters only at the moment) of an ItemCollection locally and return the corresponding ItemCollection.
+- function `harmonize_sen2cor_offset`: adds an `offset` property to the assets so it is taken into account by `to_xarray`.
+- method `ItemCollection.drop_duplicates`: drop items with duplicated IDs, e.g. as returned by pgstac.
+- method `ItemCollection.drop_non_raster`: drop non-raster assets.
+- function `extract_points` and method `ItemCollection.extract_points` to extract point time series.
+- `writer_args` argument to `ItemCollection.apply_...` methods and functions, in order to specify the output format, e.g. the encoding.
+- in local.py, `start_datetime` and `end_datetime` can now be used instead of `datetime` in the template used to build a local ItemCollection.
+- module `extents.py` to manipulate STAC extents.
+- tests for CI
+
+## Fix
+
+- `apply_formula` with the "in" operator.
+- COG type of local STAC assets (instead of GTiff)
+- imports in `simplestac.utils`
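
Taken together, the additions above support a workflow like the following sketch (the bbox and time range are illustrative; see `tests/test_remote.py` for tested equivalents):

```python
import planetary_computer as pc
import pystac_client
from simplestac.utils import ItemCollection, harmonize_sen2cor_offset

URL = "https://planetarycomputer.microsoft.com/api/stac/v1"
catalog = pystac_client.Client.open(URL, modifier=pc.sign_inplace)
search = catalog.search(
    collections=["sentinel-2-l2a"],
    bbox=[3.6, 43.5, 3.7, 43.6],      # illustrative bbox (WGS84)
    datetime="2022-01-01/2022-01-31",
    sortby="datetime",
)
col = ItemCollection(search.item_collection())
col.drop_duplicates(inplace=True)            # drop items with duplicated IDs
col = col.drop_non_raster()                  # keep only raster assets
harmonize_sen2cor_offset(col, inplace=True)  # offset taken into account by to_xarray
arr = col.to_xarray()
```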
diff --git a/pyproject_doc.toml b/pyproject_doc.toml
index 1fa654e98aa487e4407c397e8989bf2daa208e32..6d948dec66e75376f6caaca0d55fd8ae08283ce7 100644
--- a/pyproject_doc.toml
+++ b/pyproject_doc.toml
@@ -53,7 +53,14 @@ features=["navigation.top"]
     icon = "material/theme-light-dark"
     name = "Switch to light mode"
 
+  [[tool.portray.mkdocs.theme.palette]]
+  scheme = "default"
+  primary = "teal"
+  accent = "blue"
 
+    [tool.portray.mkdocs.theme.palette.toggle]
+    icon = "material/theme-light-dark"
+    name = "Switch to dark mode"
 # favicon = "docs/icon/....png"
 # logo = "docs/icon/....png"
 
diff --git a/simplestac/__init__.py b/simplestac/__init__.py
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..e60be81e2a41d455e24f86fc119a3c451f0a4538 100644
--- a/simplestac/__init__.py
+++ b/simplestac/__init__.py
@@ -0,0 +1,7 @@
+from importlib.metadata import version, PackageNotFoundError
+
+try:
+    __version__ = version("simplestac")
+except PackageNotFoundError:
+    # package is not installed
+    pass
\ No newline at end of file
diff --git a/simplestac/extents.py b/simplestac/extents.py
index 1979f67ef37ff3e05e7b54eb4eff74298ee0028c..ef69713c829affff2709345f4245866b27b087cb 100644
--- a/simplestac/extents.py
+++ b/simplestac/extents.py
@@ -1,12 +1,61 @@
 """
-Module to deal with STAC Extents.
+Deal with STAC Extents.
+
+# AutoSpatialExtent
+
+The vanilla `pystac.SpatialExtent` describes a spatial extent as a list
+of bounding boxes. While this is useful, we sometimes want to merge
+bounding boxes that partially overlap. Take mainland France, covered by
+multiple remote sensing (RS) products whose footprints generally overlap,
+and the island of Corsica, also covered by several RS products whose
+bounding boxes are spatially disjoint from the mainland ones. In this
+example, we would like to regroup all RS product bounding boxes into one
+bbox for mainland France and another for Corsica. This is particularly
+useful when a STAC collection sparsely covers a broad area (e.g.
+worldwide), with several isolated regions.
+
+`AutoSpatialExtent` is an extension of `pystac.SpatialExtent`.
+
+Instances are initialized with the same arguments as `pystac.SpatialExtent`.
+Internally, the list of bounding boxes is processed at initialization, so
+that all partially overlapping bounding boxes are merged into single ones.
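+
+A minimal sketch (the bbox values mirror the test suite):
+
+>>> from simplestac.extents import AutoSpatialExtent
+>>> ase = AutoSpatialExtent(bboxes=[[4, 42, 6, 44], [3, 41, 5, 43], [7, 42, 8, 50]])
+>>> ase.bboxes  # the two overlapping boxes are merged, the disjoint one is kept
+[[7, 42, 8, 50], [3, 41, 6, 44]]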
+
+# AutoTemporalExtent
+
+`AutoTemporalExtent` is an extension of `pystac.TemporalExtent`.
+It computes the overall minimum and maximum dates of a set of dates or
+date ranges.
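+
+A minimal sketch:
+
+>>> from datetime import datetime
+>>> from simplestac.extents import AutoTemporalExtent
+>>> ate = AutoTemporalExtent([datetime(2020, 1, 1), datetime(2023, 1, 1)])
+>>> ate.intervals
+[[datetime.datetime(2020, 1, 1, 0, 0), datetime.datetime(2023, 1, 1, 0, 0)]]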
+
 """
 import pystac
 from dataclasses import dataclass
 from datetime import datetime
-from typing import Union
+from typing import Union, List
+from numbers import Number
+from annotated_types import Predicate
+from typing_extensions import Annotated
+
+
+def is_bbox(l: List) -> bool:
+    """
+    Predicate to test whether the input list represents a bounding box in WGS84.
+
+    Args:
+        l: a list of `Number`
+
+    Returns:
+        True if `l` is a valid [xmin, ymin, xmax, ymax] bounding box in WGS84, else False.
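+
+    Example (sanity checks):
+
+    >>> is_bbox([3.0, 41.0, 5.0, 43.0])
+    True
+    >>> is_bbox([5.0, 43.0, 3.0, 41.0])  # min/max swapped
+    False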
+    """
+    if len(l) == 4:
+        if all(isinstance(i, Number) for i in l):
+            if -180 <= l[0] and -90 <= l[1] and l[2] <= 180 and l[3] <= 90:
+                if l[0] <= l[2] and l[1] <= l[3]:
+                    return True
+    return False
+
 
-Coords = list[Union[int, float]]
+Bbox = Annotated[list, Predicate(is_bbox)]
 
 
 @dataclass
@@ -14,7 +63,7 @@ class SmartBbox:
     """
     Small class to work with a single 2D bounding box.
     """
-    coords: Coords = None  # [xmin, ymin, xmax, ymax]
+    coords: Bbox = None  # [xmin, ymin, xmax, ymax]
 
     def touches(self, other: "SmartBbox") -> bool:
         """
@@ -53,7 +102,7 @@ class SmartBbox:
             ]
 
 
-def clusterize_bboxes(bboxes: list[Coords]) -> list[Coords]:
+def clusterize_bboxes(bboxes: list[Bbox]) -> list[Bbox]:
     """
     Computes a list of bounding boxes regrouping all overlapping ones.
 
@@ -64,74 +113,28 @@ def clusterize_bboxes(bboxes: list[Coords]) -> list[Coords]:
         list of 2D bounding boxes (list of int of float)
 
     """
-    # Regroup bboxes into clusters of bboxes
-    smart_bboxes = [SmartBbox(bbox) for bbox in bboxes]
-    clusters = bboxes_to_bboxes_clusters(smart_bboxes)
-
-    # Compute clusters extents
-    clusters_unions = [SmartBbox() for _ in clusters]
-    for i, cluster in enumerate(clusters):
-        for smart_bbox in cluster:
-            clusters_unions[i].update(smart_bbox)
-
-    return [smart_bbox.coords.copy() for smart_bbox in clusters_unions]
-
-
-def bboxes_to_bboxes_clusters(smart_bboxes: list[SmartBbox]) -> list[
-    list[SmartBbox]]:
-    """
-    Transform a list of bounding boxes into a nested list of clustered ones.
-
-    Args:
-        smart_bboxes: a list of `SmartBbox` instances
-
-    Returns:
-        a list of `SmartBbox` instances list
-
-    """
-    clusters_labels = compute_smart_bboxes_clusters(smart_bboxes)
-    clusters_bboxes = [[] for _ in range(max(clusters_labels) + 1)]
-    for smart_bbox, labels in zip(smart_bboxes, clusters_labels):
-        clusters_bboxes[labels].append(smart_bbox)
-    return clusters_bboxes
-
-
-def compute_smart_bboxes_clusters(smart_bboxes: list[SmartBbox]) -> list[int]:
-    """
-    Compute the extent of a cluster of `SmartBbox` instances.
-
-    Args:
-        smart_bboxes: a list of `SmartBbox` instances
-
-    Returns:
-        a vector of same size as `smart_bboxes` with the group numbers (int)
-
-    """
-    labels = len(smart_bboxes) * [None]
-    group = 0
-
-    def dfs(index: int):
-        """
-        Deep first search with o(n) complexity.
-
-        Args:
-            index: vertex index.
+    bboxes = [SmartBbox(bbox) for bbox in bboxes]
+    clusters = [bboxes.pop()]
+
+    while bboxes:
+        bbox = bboxes.pop()
+        inter_clusters = [
+            i for i, cluster in enumerate(clusters) if bbox.touches(cluster)
+        ]
+        if inter_clusters:
+            # We merge all intersecting clusters into one
+            clusters[inter_clusters[0]].update(bbox)
+            for i in inter_clusters[1:]:
+                clusters[inter_clusters[0]].update(clusters[i])
+            clusters = [
+                cluster
+                for i, cluster in enumerate(clusters)
+                if i not in inter_clusters[1:]
+            ]
+        else:
+            clusters.append(bbox)
 
-        """
-        labels[index] = group
-        cur_item = smart_bboxes[index]
-        for i, (item, label) in enumerate(zip(smart_bboxes, labels)):
-            if i != index and cur_item.touches(item) and labels[i] is None:
-                dfs(i)
-
-    while any(label is None for label in labels):
-        next_unmarked = next(
-            i for i, label in enumerate(labels)
-            if label is None
-        )
-        dfs(next_unmarked)
-        group += 1
-    return labels
+    return [cluster.coords for cluster in clusters]
 
 
 class AutoSpatialExtent(pystac.SpatialExtent):
@@ -151,7 +154,7 @@ class AutoSpatialExtent(pystac.SpatialExtent):
         super().__init__(*args, **kwargs)
         self.clusterize_bboxes()
 
-    def update(self, other: pystac.SpatialExtent | Coords):
+    def update(self, other: pystac.SpatialExtent | Bbox):
         """
         Updates itself with a new spatial extent or bounding box. Modifies
         inplace `self.bboxes`.
@@ -206,11 +209,15 @@ class AutoTemporalExtent(pystac.TemporalExtent):
 
     def make_single_interval(self):
         all_dates = []
-        for interval in self.intervals:
-            if isinstance(interval, (list, tuple)):
-                all_dates += [i for i in interval]
-            elif isinstance(interval, datetime):
-                all_dates.append(interval)
-            else:
-                TypeError(f"Unsupported date/range of: {interval}")
-        self.intervals = [[min(all_dates), max(all_dates)]]
+        for dates_or_intervals in self.intervals:
+            # Because the base class (`pystac.TemporalExtent`) converts
+            # everything into [[...]]
+            for date_or_interval in dates_or_intervals:
+                if isinstance(date_or_interval, (list, tuple)):
+                    all_dates += [i for i in date_or_interval if i is not None]
+                elif isinstance(date_or_interval, datetime):
+                    all_dates.append(date_or_interval)
+                else:
+                    raise TypeError(f"Unsupported date/range of: {date_or_interval}")
+        self.intervals = \
+            [[min(all_dates), max(all_dates)]] if all_dates else [[None, None]]
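
A short sketch of the resulting `AutoTemporalExtent` behavior with mixed dates and open ranges (mirroring `tests/test_extents.py`): `None` bounds are simply ignored when computing the overall interval.

```python
from datetime import datetime
from simplestac.extents import AutoTemporalExtent

ate = AutoTemporalExtent([
    datetime(2020, 1, 1),
    [None, None],                    # fully open range: ignored
    [datetime(2019, 1, 1), None],    # open-ended range: only the bound counts
    [None, datetime(2024, 1, 1)],
])
assert ate.intervals == [[datetime(2019, 1, 1), datetime(2024, 1, 1)]]
```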
diff --git a/simplestac/utils.py b/simplestac/utils.py
index fecce7200b80f5d58e586046c3bf381aa08d2995..9ed7813daf1abdc5d3bc469de23d41aefafa50cc 100644
--- a/simplestac/utils.py
+++ b/simplestac/utils.py
@@ -19,6 +19,7 @@ from tqdm import tqdm
 from typing import Union
 import warnings
 import datetime
+import geopandas as gpd
 
 from simplestac.local import stac_asset_info_from_raster, properties_from_assets
 
@@ -90,12 +91,18 @@ class ExtendPystacClasses:
         # times = pd.to_datetime(
         with warnings.catch_warnings():
             warnings.filterwarnings("ignore", category=UserWarning)
-            arr = stackstac.stack(self, xy_coords=xy_coords, **kwargs)
+            try:
+                arr = stackstac.stack(self, xy_coords=xy_coords, **kwargs)
+            except ValueError as e:
+                if "Cannot automatically compute the resolution" in str(e):
+                    raise ValueError(
+                        str(e) + "\nOr drop non-raster assets from the "
+                        "collection with ItemCollection.drop_non_raster()"
+                    ) from e
+                raise
 
         if bbox is not None:
             arr = arr.rio.clip_box(*bbox)
         if geometry is not None:
-            if hasattr(geometry, 'crs') and geometry.crs != arr.rio.crs:
+            if hasattr(geometry, 'crs') and not geometry.crs.equals(arr.rio.crs):
                 logger.debug(f"Reprojecting geometry from {geometry.crs} to {arr.rio.crs}")
                 geometry = geometry.to_crs(arr.rio.crs)
             arr = arr.rio.clip(geometry)
@@ -486,6 +493,66 @@ class ExtendPystacClasses:
         if not inplace:
             return x
 
+    def extract_points(self, points, method="nearest", tolerance="pixel", drop=False, **kwargs):
+        """Extract points from xarray
+
+        Parameters
+        ----------
+        points : geopandas.GeoDataFrame or pandas.DataFrame
+            Points or coordinates of the points.
+        method, tolerance, drop : see xarray.DataArray.sel
+            Additional keyword arguments passed to xarray.DataArray.sel
+            If tolerance is "pixel", it is set to half the resolution
+            of the xarray, supposing it is a rioxarray.
+        **kwargs:
+            Additional keyword arguments passed to `ItemCollection.to_xarray()`
+            
+        Returns
+        -------
+        xarray.DataArray or xarray.Dataset
+            The points values with points index as coordinate.
+            The returned xarray can then be converted to
+            dataframe with `to_dataframe` or `to_dask_dataframe`.
+
+        Examples
+        --------
+        >>> # a sketch: `col` is an ItemCollection and `points` a GeoDataFrame
+        >>> # of Point/MultiPoint geometries indexed by "id_point"
+        >>> arr = col.extract_points(points)
+        >>> df = arr.to_dataframe()
+
+        See the module-level function `extract_points` for a complete,
+        self-contained example on a plain DataArray.
+        """
+
+        # fail early if any geometry is not a point
+        if isinstance(points, (gpd.GeoDataFrame, gpd.GeoSeries)):
+            if not points.geom_type.isin(['Point', 'MultiPoint']).all():
+                raise ValueError("All geometries must be of type Point or MultiPoint")
+        
+        arr = self.to_xarray(**kwargs)
+        if tolerance == "pixel":
+            tolerance = arr.rio.resolution()[0] / 2
+        return extract_points(arr, points, method=method, tolerance=tolerance, drop=drop)
 
 class ItemCollection(pystac.ItemCollection, ExtendPystacClasses):
     pass
@@ -504,6 +571,7 @@ def write_assets(x: Union[ItemCollection, pystac.Item],
                  remove_item_props=DEFAULT_REMOVE_PROPS,
                  overwrite=False,
                  progress=True,
+                 writer_args=None,
                  **kwargs):
     """
     Writes item(s) assets to the specified output directory.
@@ -524,9 +592,16 @@ def write_assets(x: Union[ItemCollection, pystac.Item],
         If None, no properties are removed.
     overwrite : bool, optional
         Whether to overwrite existing files. Defaults to False.
+    writer_args : dict, optional
+        Arguments to pass to write_raster for each asset. Defaults to `None`.
+        See Notes for an example.
     **kwargs
         Additional keyword arguments passed to write_raster.
 
+    Returns
+    -------
+    ItemCollection
+        The item collection with the metadata updated with local asset paths.
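+
+    Notes
+    -----
+    A possible `writer_args`, keyed by asset/band name (the encoding values
+    here are illustrative):
+
+    >>> writer_args = dict(
+    ...     B02=dict(encoding=dict(dtype="int16", scale_factor=0.001)),
+    ... )
+    >>> col = write_assets(col, "out_dir", writer_args=writer_args)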
     """    
     if isinstance(x, pystac.Item):
         x = [x]
@@ -535,16 +610,23 @@ def write_assets(x: Union[ItemCollection, pystac.Item],
     items = []
     for item in tqdm(x, disable=not progress):
         ic = ItemCollection([item], clone_items=True)
         arr = ic.to_xarray(bbox=bbox, xy_coords=xy_coords).squeeze("time")
         item_dir = (output_dir / item.id).mkdir_p()
         for b in arr.band.values:
             filename = '_'.join([item.id, b+'.tif'])
             file = item_dir / f"{filename}"
+            
+            # Use specific writer args if available
+            if writer_args is not None and b in writer_args:
+                wa = writer_args[b]
+            else:
+                wa = kwargs
+            
             try:
                 if file.exists() and not overwrite:
                     logger.debug(f"File already exists, skipping asset: {file}")
                 else:
-                    write_raster(arr.sel(band=b), file, **kwargs)
+                    write_raster(arr.sel(band=b), file, **wa)
                 
                 # update stac asset info            
                 stac_info = stac_asset_info_from_raster(file)
@@ -609,47 +691,6 @@ def update_item_properties(x: pystac.Item, remove_item_props=DEFAULT_REMOVE_PROP
         for k in pop_props:
             x.properties.pop(k)
 
-
-def harmonize_sen2cor_offet(collection, bands=S2_SEN2COR_BANDS, inplace=False):
-    """
-    Harmonize new Sentinel-2 item collection (Sen2Cor v4+, 2022-01-25)
-    to the old baseline (v3-):
-    adds an offset of -1000 to all band assets of items
-    with datetime >= 2022-01-25
-
-    Parameters
-    ----------
-    data: ItemCollection
-        An item collection of S2 scenes
-    bands: list
-        A list of bands to harmonize
-
-    Returns
-    -------
-    ItemCollection
-        A collection of S2 scenes with extra_fields["raster:bands"]
-        added/updated to each band asset with datetime >= 2022-01-25.
-    
-    Notes
-    -----
-    References:
-    - https://planetarycomputer.microsoft.com/dataset/sentinel-2-l2a#Baseline-Change
-    - https://github.com/microsoft/PlanetaryComputer/issues/134
-    """
-    
-    if not inplace:
-        collection = collection.copy()
-    for item in collection:
-        for asset in bands:
-            if asset in item.assets:
-                if item.properties["datetime"] >= "2022-01-25":
-                    item.assets[asset].extra_fields["raster:bands"] = [dict(offset=-1000)]
-                else:
-                    item.assets[asset].extra_fields["raster:bands"] = [dict(offset=0)]
-    if inplace:
-        return collection
-
-
 def apply_item(x, fun, name, output_dir, overwrite=False,
                copy=True, bbox=None, geometry=None, writer_args=None, **kwargs):
     """
@@ -876,4 +917,111 @@ def apply_formula(x, formula):
 
     return eval(formula)
 
+def harmonize_sen2cor_offset(x, bands=S2_SEN2COR_BANDS, inplace=False):
+    """
+    Harmonize new Sentinel-2 item collection (Sen2Cor v4+, 2022-01-25)
+    to the old baseline (v3-):
+    adds an offset of -1000 to the asset extra field "raster:bands" of the items
+    with datetime >= 2022-01-25
+
+    Parameters
+    ----------
+    x: ItemCollection
+        An item collection of S2 scenes
+    bands: list
+        A list of bands to harmonize
+    inplace: bool
+        Whether to modify the collection in place. Defaults to False,
+        in which case a cloned collection is returned.
+
+    Returns
+    -------
+    ItemCollection
+        A collection of S2 scenes with extra_fields["raster:bands"]
+        added/updated to each band asset with datetime >= 2022-01-25.
+    
+    Notes
+    -----
+    References:
+    - https://planetarycomputer.microsoft.com/dataset/sentinel-2-l2a#Baseline-Change
+    - https://github.com/microsoft/PlanetaryComputer/issues/134
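+
+    Examples
+    --------
+    A sketch, where `col` is an ItemCollection of S2 L2A items (the offset is
+    0 or -1000 depending on the item's datetime):
+
+    >>> col = harmonize_sen2cor_offset(col)
+    >>> col[0].assets["B02"].extra_fields["raster:bands"]
+    [{'offset': 0}]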
+    """
+    
+    if not inplace:
+        x = x.clone()
+    for item in x:
+        for asset in bands:
+            if asset in item.assets:
+                if item.properties["datetime"] >= "2022-01-25":
+                    item.assets[asset].extra_fields["raster:bands"] = [dict(offset=-1000)]
+                else:
+                    item.assets[asset].extra_fields["raster:bands"] = [dict(offset=0)]
+    if not inplace:
+        return x
+
+def extract_points(x, points, method=None, tolerance=None, drop=False):
+    """Extract points from xarray
+
+    Parameters
+    ----------
+    x : xarray.DataArray or xarray.Dataset
+    points : geopandas.GeoDataFrame or pandas.DataFrame
+        Points or coordinates of the points
+    method, tolerance, drop : see xarray.DataArray.sel
+        Additional keyword arguments passed to xarray.DataArray.sel
+
+    Returns
+    -------
+    xarray.DataArray or xarray.Dataset
+        The points values with points index as coordinate.
+        The returned xarray can then be converted to
+        dataframe with `to_dataframe` or `to_dask_dataframe`.
+
+    Examples
+    --------
+    >>> import xarray as xr
+    >>> import pandas as pd
+    >>> import dask.array
+    >>> import numpy as np
+    >>> da = xr.DataArray(
+    ... # np.random.random((100,200)),
+    ... dask.array.random.random((100,200,10), chunks=10),
+    ... coords = [('x', np.arange(100)+.5), 
+    ...           ('y', np.arange(200)+.5),
+    ...           ('z', np.arange(10)+.5)]
+    ... ).rename("pixel_value")
+    >>> df = pd.DataFrame(
+    ...    dict(
+    ...        x=np.random.permutation(range(100))[:100]+np.random.random(100),
+    ...        y=np.random.permutation(range(100))[:100]+np.random.random(100),
+    ...        other=range(100),
+    ...    )
+    ... )
+    >>> df.index.rename("id_point", inplace=True)
+    >>> extraction = extract_points(da, df, method="nearest", tolerance=.5)
+    >>> ext_df = extraction.to_dataframe()
+    >>> ext_df.reset_index(drop=False, inplace=True)
+    >>> ext_df.rename({k: k+"_pixel" for k in da.dims}, axis=1, inplace=True)
+    >>> # join extraction to original dataframe
+    >>> df.merge(ext_df, on=["id_point"])
+
+    """
+    valid_types = (gpd.GeoDataFrame, gpd.GeoSeries)
+    if isinstance(points, valid_types):
+        if not points.geom_type.isin(['Point', 'MultiPoint']).all():
+            raise ValueError("All geometries must be of type Point or MultiPoint")
+        # align the points CRS with the raster CRS before extracting coordinates
+        if hasattr(points, 'crs') and not points.crs.equals(x.rio.crs):
+            logger.debug(f"Reprojecting points from {points.crs} to {x.rio.crs}")
+            points = points.to_crs(x.rio.crs)
+        points = points.get_coordinates()
+
+    # keep only the coordinate columns matching the array dimensions (e.g. x, y)
+    xk = x.dims
+    coords_cols = [c for c in points.keys() if c in xk]
+    coords = points[coords_cols]
+    # vectorized selection: the points index becomes a coordinate of the result
+    points = x.sel(coords.to_xarray(), method=method, tolerance=tolerance, drop=drop)
+    return points
 #######################################
\ No newline at end of file
diff --git a/tests/test_extents.py b/tests/test_extents.py
new file mode 100644
index 0000000000000000000000000000000000000000..f5d8fb29fe9789d592f84794f74a0c62a5ecacf1
--- /dev/null
+++ b/tests/test_extents.py
@@ -0,0 +1,60 @@
+import pytest
+from simplestac.extents import AutoSpatialExtent, AutoTemporalExtent
+
+from datetime import datetime
+
+def test_spatial_extent():
+    """
+    Test the `AutoSpatialExtent` class.
+
+    Two disjoint clusters of bboxes, composed of 2 and 1 bboxes respectively,
+    are created (by definition, the clusters' bboxes don't overlap each other).
+    We instantiate an `AutoSpatialExtent` and check that the two expected clusters are found.
+
+    """
+    # first cluster (e.g. "france mainland")
+    bbox1 = [4, 42, 6, 44]
+    bbox2 = [3, 41, 5, 43]
+
+    # second cluster (e.g. "corse")
+    bbox3 = [7, 42, 8, 50]
+
+    ase = AutoSpatialExtent(bboxes=[bbox1, bbox2, bbox3])
+    assert ase.bboxes == [[7, 42, 8, 50], [3, 41, 6, 44]]
+
+def test_temporal_extent():
+    """
+    Test the `AutoTemporalExtent`.
+
+    """
+    # dates only (as plain list)
+    dates = [
+        datetime(year=2020, month=1, day=1),
+        datetime(year=2022, month=1, day=1),
+        datetime(year=2023, month=1, day=1),
+    ]
+    auto_text = AutoTemporalExtent(dates)
+    assert auto_text.intervals == [[
+        datetime(year=2020, month=1, day=1),
+        datetime(year=2023, month=1, day=1)
+    ]]
+
+    # dates only (as nested list)
+    auto_text = AutoTemporalExtent([dates])
+    assert auto_text.intervals == [[
+        datetime(year=2020, month=1, day=1),
+        datetime(year=2023, month=1, day=1)
+    ]]
+
+    # mixed dates + ranges
+    auto_text = AutoTemporalExtent([
+        datetime(year=2020, month=1, day=1),
+        [None, None],
+        [datetime(year=2019, month=1, day=1), None],
+        [None, datetime(year=2024, month=1, day=1)],
+        datetime(year=2023, month=1, day=1)
+    ])
+    assert auto_text.intervals == [[
+        datetime(year=2019, month=1, day=1),
+        datetime(year=2024, month=1, day=1)
+    ]]
diff --git a/tests/test_local.py b/tests/test_local.py
index 676448ccdbf39841938d0af8d2c20d0c082285ac..3471050eacc44322c622fb492298d52b7f048c85 100644
--- a/tests/test_local.py
+++ b/tests/test_local.py
@@ -2,6 +2,9 @@ from simplestac.local import collection_format, build_item_collection
 from simplestac.utils import write_raster, apply_formula
 import xarray as xr
 import pystac
+from shapely.geometry import MultiPoint
+import geopandas as gpd
+
 
 def test_formatting():
     fmt = collection_format()
@@ -86,7 +89,7 @@ def test_apply_items_raster_args(s2scene_dir, roi):
                 dtype="int16", 
                 scale_factor=0.001,
                 add_offset=0.0,
-                _FillValue= 2**15 - 1,
+                _FillValue= -2**15,
             ),
         )
     )
@@ -95,9 +98,20 @@ def test_apply_items_raster_args(s2scene_dir, roi):
     assert rb["datatype"] == "int16"
     assert rb["scale"] == 0.001
     assert rb["offset"] == 0.0
-    assert rb["nodata"] == 2**15 - 1
+    assert rb["nodata"] == -2**15
+
+def test_extract_points(s2scene_dir, roi):
+    col = build_item_collection(s2scene_dir, collection_format())
+    points = roi.geometry.apply(lambda x: MultiPoint(list(x.exterior.coords)))
+    points.index.rename("id_point", inplace=True)
+    ext = col.extract_points(points)
+    assert ext.id_point.isin(points.index.values).all()
+    coords = points.get_coordinates().reset_index(drop=True)
+    points = gpd.GeoSeries(gpd.points_from_xy(**coords, crs=roi.crs))
+    points.index.rename("id_point", inplace=True)
+    ext = col.extract_points(points)
+    assert ext.id_point.isin(points.index.values).all()
 
-    
 ############################################################
     
 
diff --git a/tests/test_remote.py b/tests/test_remote.py
index 3fced923aab8aa668c6cf2d777d0193baa17417f..33339827156793213f4aa0e1e96f7d1fdd9bc140 100644
--- a/tests/test_remote.py
+++ b/tests/test_remote.py
@@ -1,8 +1,23 @@
-from simplestac.utils import write_assets, ItemCollection, harmonize_sen2cor_offet
+from simplestac.utils import write_assets, ItemCollection, harmonize_sen2cor_offset
 import planetary_computer as pc
 import pystac_client
 
 URL = "https://planetarycomputer.microsoft.com/api/stac/v1"
+
+def test_to_xarray(roi, s2scene_pc_dir):
+    time_range = "2022-01-01/2022-01-31"
+
+    catalog = pystac_client.Client.open(URL, modifier=pc.sign_inplace)
+    search = catalog.search(
+        collections=["sentinel-2-l2a"],
+        bbox=roi.to_crs(4326).total_bounds,
+        datetime=time_range,
+        sortby="datetime",
+    )
+    col = ItemCollection(search.item_collection())
+    x = col.drop_non_raster().to_xarray()
+    assert len(x.time) == len(col)
+
 def test_offset_harmonization(roi, s2scene_pc_dir):
     time_range = "2022-01-20/2022-01-31"
 
@@ -14,14 +29,30 @@ def test_offset_harmonization(roi, s2scene_pc_dir):
         sortby="datetime",
     )
     col = search.item_collection()
-    harmonize_sen2cor_offet(col, inplace=True)
+    harmonize_sen2cor_offset(col, inplace=True)
     of0 = col[0].assets["B02"].extra_fields["raster:bands"][0]["offset"]
     ofN = col[-1].assets["B02"].extra_fields["raster:bands"][0]["offset"]
 
     assert of0 == 0
     assert ofN == -1000
 
+def test_drop_duplicates(roi, s2scene_pc_dir):
+    time_range = "2022-01-20/2022-01-31"
+    catalog = pystac_client.Client.open(URL, modifier=pc.sign_inplace)
+    search = catalog.search(
+        collections=["sentinel-2-l2a"],
+        bbox=roi.to_crs(4326).total_bounds,
+        datetime=time_range,
+        sortby="datetime",
+    )
+    col = search.item_collection()
+    col1 = ItemCollection(col.clone()+col.clone())
+    col1.drop_duplicates(inplace=True)
+    assert len(col1) == len(col)
+
 def test_write_assets(roi, s2scene_pc_dir):
+    s2scene_pc_dir.rmtree_p().mkdir_p()
     time_range = "2016-01-01/2016-01-31"
 
     catalog = pystac_client.Client.open(URL)
@@ -35,7 +66,6 @@ def test_write_assets(roi, s2scene_pc_dir):
     col = ItemCollection(search.item_collection()).drop_non_raster()
     bbox = roi.to_crs(col.to_xarray().rio.crs).total_bounds
     col = pc.sign(col)
-    s2scene_pc_dir.rmtree_p().mkdir_p()
     encoding=dict(
         dtype="int16", 
         scale_factor=0.001,
@@ -48,6 +78,7 @@ def test_write_assets(roi, s2scene_pc_dir):
     item = new_col[0]
     assert item.id == col[0].id
     assert len(item.assets) == len(s2scene_pc_dir.dirs()[0].files("*.tif"))
+    assert item.assets["B08"].href.startswith(s2scene_pc_dir)
     assert new_col[0].assets["B08"].extra_fields["raster:bands"][0]["scale"] == 0.001