From 483b8d1d8fe38d6e6075bf7c0d95aca334f0f579 Mon Sep 17 00:00:00 2001 From: James Gaboardi Date: Sat, 25 Feb 2023 20:06:39 -0500 Subject: [PATCH 01/16] ruff-en code base --- spopt/locate/base.py | 1 - spopt/locate/coverage.py | 14 ++-- spopt/locate/p_center.py | 4 +- spopt/locate/p_dispersion.py | 2 +- spopt/locate/p_median.py | 4 +- spopt/region/azp.py | 6 +- spopt/region/base.py | 4 +- spopt/region/maxp.py | 18 ++--- spopt/region/region_k_means.py | 3 - spopt/region/skater.py | 10 +-- spopt/region/spenc.py | 12 ++-- spopt/region/spenclib/abstracts.py | 112 +++++++++++++++++------------ spopt/tests/test_azp.py | 6 +- spopt/tests/test_clscp-so.py | 7 +- spopt/tests/test_lscp.py | 3 +- spopt/tests/test_lscpb.py | 2 +- spopt/tests/test_maxp.py | 18 +++-- spopt/tests/test_mclp.py | 5 +- spopt/tests/test_p_center.py | 3 +- spopt/tests/test_p_dispersion.py | 6 +- spopt/tests/test_p_median.py | 3 +- spopt/tests/test_random_regions.py | 4 +- spopt/tests/test_region_util.py | 1 - spopt/tests/test_spenc.py | 1 - spopt/tests/test_ward.py | 1 - 25 files changed, 133 insertions(+), 117 deletions(-) diff --git a/spopt/locate/base.py b/spopt/locate/base.py index f2ea60bb..d944782d 100644 --- a/spopt/locate/base.py +++ b/spopt/locate/base.py @@ -471,7 +471,6 @@ def add_predefined_facility_constraint( """ if hasattr(obj, "fac_vars"): fac_vars = getattr(obj, "fac_vars") - model = getattr(obj, "problem") for ind in range(len(predefined_fac)): if predefined_fac[ind]: fac_vars[ind].setInitialValue(1) diff --git a/spopt/locate/coverage.py b/spopt/locate/coverage.py index 41f5a072..34f965fc 100644 --- a/spopt/locate/coverage.py +++ b/spopt/locate/coverage.py @@ -424,11 +424,11 @@ def from_geodataframe( " geodataframe contains mixed type geometries or is not a point. Be " "sure deriving centroid from geometries doesn't affect the results." ) - if len(dem_type_geom) > 1 or not "Point" in dem_type_geom: + if len(dem_type_geom) > 1 or "Point" not in dem_type_geom: warnings.warn(f"Demand{_msg}", UserWarning) dem = dem.centroid - if len(fac_type_geom) > 1 or not "Point" in fac_type_geom: + if len(fac_type_geom) > 1 or "Point" not in fac_type_geom: warnings.warn(f"Facility{_msg}", UserWarning) fac = fac.centroid @@ -866,11 +866,11 @@ def from_geodataframe( " geodataframe contains mixed type geometries or is not a point. Be " "sure deriving centroid from geometries doesn't affect the results." ) - if len(dem_type_geom) > 1 or not "Point" in dem_type_geom: + if len(dem_type_geom) > 1 or "Point" not in dem_type_geom: warnings.warn(f"Demand{_msg}", UserWarning) dem = dem.centroid - if len(fac_type_geom) > 1 or not "Point" in fac_type_geom: + if len(fac_type_geom) > 1 or "Point" not in fac_type_geom: warnings.warn(f"Facility{_msg}", UserWarning) fac = fac.centroid @@ -1295,7 +1295,7 @@ def from_geodataframe( >>> mclp_from_geodataframe.perc_cov 100.0 - """ + """ # noqa E501 predefined_facilities_arr = None if predefined_facility_col is not None: @@ -1312,11 +1312,11 @@ def from_geodataframe( " geodataframe contains mixed type geometries or is not a point. Be " "sure deriving centroid from geometries doesn't affect the results." 
         )
-        if len(dem_type_geom) > 1 or not "Point" in dem_type_geom:
+        if len(dem_type_geom) > 1 or "Point" not in dem_type_geom:
             warnings.warn(f"Demand{_msg}", UserWarning)
             dem = dem.centroid
 
-        if len(fac_type_geom) > 1 or not "Point" in fac_type_geom:
+        if len(fac_type_geom) > 1 or "Point" not in fac_type_geom:
             warnings.warn(f"Facility{_msg}", UserWarning)
             fac = fac.centroid
 
diff --git a/spopt/locate/p_center.py b/spopt/locate/p_center.py
index f4abbba6..56790b54 100644
--- a/spopt/locate/p_center.py
+++ b/spopt/locate/p_center.py
@@ -335,11 +335,11 @@ def from_geodataframe(
             " geodataframe contains mixed type geometries or is not a point. Be "
             "sure deriving centroid from geometries doesn't affect the results."
         )
-        if len(dem_type_geom) > 1 or not "Point" in dem_type_geom:
+        if len(dem_type_geom) > 1 or "Point" not in dem_type_geom:
             warnings.warn(f"Demand{_msg}", UserWarning)
             dem = dem.centroid
 
-        if len(fac_type_geom) > 1 or not "Point" in fac_type_geom:
+        if len(fac_type_geom) > 1 or "Point" not in fac_type_geom:
             warnings.warn(f"Facility{_msg}", UserWarning)
             fac = fac.centroid
 
diff --git a/spopt/locate/p_dispersion.py b/spopt/locate/p_dispersion.py
index 6ef5a9b0..47b45109 100644
--- a/spopt/locate/p_dispersion.py
+++ b/spopt/locate/p_dispersion.py
@@ -285,7 +285,7 @@ def from_geodataframe(
 
         fac_type_geom = fac.geom_type.unique()
 
-        if len(fac_type_geom) > 1 or not "Point" in fac_type_geom:
+        if len(fac_type_geom) > 1 or "Point" not in fac_type_geom:
             warnings.warn(
                 (
                     "Facility geodataframe contains mixed type geometries "
diff --git a/spopt/locate/p_median.py b/spopt/locate/p_median.py
index 74d13976..2fcb5e60 100644
--- a/spopt/locate/p_median.py
+++ b/spopt/locate/p_median.py
@@ -391,11 +391,11 @@ def from_geodataframe(
             " geodataframe contains mixed type geometries or is not a point. Be "
             "sure deriving centroid from geometries doesn't affect the results."
         )
-        if len(dem_type_geom) > 1 or not "Point" in dem_type_geom:
+        if len(dem_type_geom) > 1 or "Point" not in dem_type_geom:
             warnings.warn(f"Demand{_msg}", UserWarning)
             dem = dem.centroid
 
-        if len(fac_type_geom) > 1 or not "Point" in fac_type_geom:
+        if len(fac_type_geom) > 1 or "Point" not in fac_type_geom:
             warnings.warn(f"Facility{_msg}", UserWarning)
             fac = fac.centroid
 
diff --git a/spopt/region/azp.py b/spopt/region/azp.py
index 4bafb218..e9af96d9 100755
--- a/spopt/region/azp.py
+++ b/spopt/region/azp.py
@@ -106,7 +106,9 @@ class AZP(BaseSpOptHeuristicSolver):
 
     Run the AZP algorithm.
 
-    >>> model = AZP(mexico, w, attrs_name, n_clusters, allow_move_strategy, random_state)
+    >>> model = AZP(
+    ...     mexico, w, attrs_name, n_clusters, allow_move_strategy, random_state
+    ... )
     >>> model.solve()
 
     Get the region IDs for unit areas.
@@ -436,7 +438,7 @@ def fit_from_dict(
             corresponding area is assigned to at the beginning of the algorithm.
             If None, then a random initial clustering will be generated.
 
-        objective_func : :class:`region.ObjectiveFunction`, default: ObjectiveFunctionPairwise()
+        objective_func : region.ObjectiveFunction, default: ObjectiveFunctionPairwise()
             Refer to the corresponding argument in
             :meth:`fit_from_scipy_sparse_matrix`.
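The recurring change across the locate hunks above is ruff rule E713: `not x in y` parses as `not (x in y)`, so `x not in y` is an equivalent but idiomatic spelling, and the rewrite is purely stylistic. A minimal standalone sketch of the shared geometry check (hypothetical data, not spopt's actual API):

    import warnings

    # hypothetical stand-in for gdf.geom_type.unique() in the hunks above
    dem_type_geom = ["Point", "Polygon"]

    # `"Point" not in dem_type_geom` evaluates identically to the old
    # `not "Point" in dem_type_geom`; only the spelling changes.
    if len(dem_type_geom) > 1 or "Point" not in dem_type_geom:
        warnings.warn(
            "Demand geodataframe contains mixed geometry types.", UserWarning
        )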
diff --git a/spopt/region/base.py b/spopt/region/base.py
index 092e295c..504855a1 100644
--- a/spopt/region/base.py
+++ b/spopt/region/base.py
@@ -289,8 +289,8 @@ def infeasible_components(gdf, w, threshold_var, threshold):
     gb = gdf.groupby(by="_components").sum(numeric_only=True)
     gdf.drop(columns="_components", inplace=True)
     if gb[threshold_var].min() < threshold:
-        l = gb[gb[threshold_var] < threshold]
-        return l.index.values.tolist()
+        _l = gb[gb[threshold_var] < threshold]
+        return _l.index.values.tolist()
     return []
 
 
diff --git a/spopt/region/maxp.py b/spopt/region/maxp.py
index fef30bec..7a23bb28 100644
--- a/spopt/region/maxp.py
+++ b/spopt/region/maxp.py
@@ -11,12 +11,10 @@
 from ..BaseClass import BaseSpOptHeuristicSolver
 
 from scipy.spatial.distance import pdist, squareform
-from scipy.spatial import KDTree
 from scipy.sparse.csgraph import connected_components
-import libpysal
 import numpy as np
 from copy import deepcopy
-from .base import infeasible_components, plot_components, modify_components
+from .base import modify_components
 
 ITERCONSTRUCT = 999
 ITERSA = 10
@@ -34,9 +32,10 @@ def maxp(
     verbose=False,
     policy="single",
 ):
-    """The max-p-regions involves the aggregation of n areas into an unknown maximum number of
-    homogeneous regions, while ensuring that each region is contiguous and satisfies a minimum
-    threshold value imposed on a predefined spatially extensive attribute.
+    """The max-p-regions problem involves the aggregation of n areas into an
+    unknown maximum number of homogeneous regions, while ensuring that each
+    region is contiguous and satisfies a minimum threshold value imposed on a
+    predefined spatially extensive attribute.
 
     Parameters
     ----------
@@ -48,7 +47,8 @@ def maxp(
         Weights object created from given data
 
     attrs_name : list, required
-        Strings for attribute names to measure similarity (cols of ``geopandas.GeoDataFrame``).
+        Strings for attribute names to measure similarity
+        (cols of ``geopandas.GeoDataFrame``).
 
     threshold_name : string, required
         The name of the spatially extensive attribute variable.
@@ -237,8 +237,8 @@ def construction_phase( regionSpatialAttr[C] = spatialAttrTotal num_regions = len(regionList) - for i, l in enumerate(labels): - if l == -1: + for i, _l in enumerate(labels): + if _l == -1: enclave.append(i) if num_regions < max_p: diff --git a/spopt/region/region_k_means.py b/spopt/region/region_k_means.py index 8de538be..87cf19ce 100644 --- a/spopt/region/region_k_means.py +++ b/spopt/region/region_k_means.py @@ -20,7 +20,6 @@ _centroid, _closest, _seeds, - is_neighbor, ) @@ -68,7 +67,6 @@ def region_k_means(X, n_clusters, w, drop_islands=True, seed=0): for i, seed in enumerate(seeds): label[seed] = i to_assign = areas[label == -1] - c = 0 while to_assign.size > 0: assignments = defaultdict(list) for rid in range(k): @@ -101,7 +99,6 @@ def region_k_means(X, n_clusters, w, drop_islands=True, seed=0): to_assign = areas[label == -1] # reassignment phase - changed = [] g = w_to_g(w) iters = 1 diff --git a/spopt/region/skater.py b/spopt/region/skater.py index 86a4a387..d1d79cb3 100755 --- a/spopt/region/skater.py +++ b/spopt/region/skater.py @@ -277,11 +277,11 @@ def score(self, data, labels=None, quorum=-np.inf): part_scores = [ self.reduction( self.metric( - data[labels == l], - self.center(data[labels == l], axis=0).reshape(1, -1), + data[labels == _l], + self.center(data[labels == _l], axis=0).reshape(1, -1), ) ) - for l in range(n_subtrees) + for _l in range(n_subtrees) ] return self.reduction(part_scores).item() @@ -506,7 +506,9 @@ class Skater(BaseSpOptHeuristicSolver): Show the clustering results. >>> chicago['skater_new'] = model.labels_ - >>> chicago.plot(column='skater_new', categorical=True, figsize=(12,8), edgecolor='w') + >>> chicago.plot( + ... column='skater_new', categorical=True, figsize=(12,8), edgecolor='w' + ... ) """ diff --git a/spopt/region/spenc.py b/spopt/region/spenc.py index c92f7805..edad0a01 100644 --- a/spopt/region/spenc.py +++ b/spopt/region/spenc.py @@ -101,9 +101,13 @@ def __init__( space. There are three ways to assign labels after the laplacian embedding: ``{'kmeans', 'discretize', 'hierarchical'}``: - * ``'kmeans'`` can be applied and is a popular choice. But it can also be sensitive to initialization. - * ``'discretize'`` is another approach which is less sensitive to random initialization, and which usually finds better clusters. - * ``'hierarchical'`` decomposition repeatedly bi-partitions the graph, instead of finding the decomposition all at once, as suggested in :cite:`shi_malik_2000`. + * ``'kmeans'`` can be applied and is a popular choice. But it can also be + sensitive to initialization. + * ``'discretize'`` is another approach which is less sensitive to random + initialization, and which usually finds better clusters. + * ``'hierarchical'`` decomposition repeatedly bi-partitions the graph, + instead of finding the decomposition all at once, as suggested in + :cite:`shi_malik_2000`. degree : float (default 3) Degree of the polynomial affinity kernel. Ignored by other kernels. @@ -156,7 +160,7 @@ def __init__( - :cite:`yu_shi_2003` Multiclass spectral clustering, 2003 Stella X. 
Yu, Jianbo Shi – https://doi.org/10.1109/ICCV.2003.1238361 - """ # noqa E402 + """ self.gdf = gdf self.w = w diff --git a/spopt/region/spenclib/abstracts.py b/spopt/region/spenclib/abstracts.py index e614c266..a68a0898 100644 --- a/spopt/region/spenclib/abstracts.py +++ b/spopt/region/spenclib/abstracts.py @@ -12,7 +12,6 @@ from .scores import boundary_fraction import scipy.sparse as spar from scipy.sparse import csgraph as cg, linalg as la -from warnings import warn as Warn class SPENC(clust.SpectralClustering): @@ -105,9 +104,13 @@ def __init__( space. There are three ways to assign labels after the laplacian embedding: ``{'kmeans', 'discretize', 'hierarchical'}``: - * ``'kmeans'`` can be applied and is a popular choice. But it can also be sensitive to initialization. - * ``'discretize'`` is another approach which is less sensitive to random initialization, and which usually finds better clusters. - * ``'hierarchical'`` decomposition repeatedly bi-partitions the graph, instead of finding the decomposition all at once, as suggested in :cite:`shi_malik_2000`. + * ``'kmeans'`` can be applied and is a popular choice. But it can also + be sensitive to initialization. + * ``'discretize'`` is another approach which is less sensitive to random + initialization, and which usually finds better clusters. + * ``'hierarchical'`` decomposition repeatedly bi-partitions the graph, + instead of finding the decomposition all at once, as suggested in + :cite:`shi_malik_2000`. degree : float (default 3) Degree of the polynomial affinity kernel. Ignored by other kernels. @@ -213,7 +216,8 @@ def fit( Whether or not to simply pipe down to the sklearn spectral clustering class. Will likely break the formal guarantees about contiguity/connectedness of solutions, due to the - standardizations/short cuts taken in sklearn.cluster.SpectralClustering + standardizations/short cuts taken in + sklearn.cluster.SpectralClustering check_W : bool, default True Whether or not to check that the spatial weights matrix is correctly formatted and aligns with the X matrix. @@ -226,18 +230,19 @@ def fit( if floor_weights are provided, floor should be a limit on the sum of floor weights for each region. floor_weights : np.ndarray of shape (n,), default np.ones((n,)) - array containing weights for each observation used to determine - the region floor. + array containing weights for each observation used to + determine the region floor. cut_method : str, default 'gridsearch' option governing what method to use to partition regions 1. "gridsearch" (default): the hierarchical grid search suggested by Shi & Malik (2000); search the second eigenvector for the "best" partition in terms of cut weight. - 2. "zero": cut the eigenvector at zero. Usually a passable solution, - since the second eigenvector is usually centered around zero. - 3. "median": cut the eigenvector through its median. This means the - regions will always be divided into two halves with equal numbers - of elemental units. + 2. "zero": cut the eigenvector at zero. Usually a + passable solution, since the second eigenvector is usually + centered around zero. + 3. "median": cut the eigenvector through its median. + This means the regions will always be divided into two + halves with equal numbers of elemental units. "gridsearch" may be slow when grid_resolution is large. "zero" is the best method for large data. @@ -248,13 +253,15 @@ def fit( I call this breakme because of bug8129. 
I don't see a significant difference here
        when switching between the two, most assignments in the problems
        I've examined are the same.
-        I think, since the bug is in the scaling of the eigenvectors, it's not super important.
+        I think, since the bug is in the scaling of the eigenvectors,
+        it's not super important.
         But, in the future, it may make sense to investigate whether the bug
         in sklearn is fully fixed, which would mean that any spectral clustering
         for a weights matrix in sklearn would always be contiguous.
-        """
+        """  # noqa E501
+
         if np.isinf(self.n_clusters):
             self.assign_labels = "hierarchical"
 
@@ -391,22 +398,23 @@ def _spectral_bipartition(
             the sum of floor weights for each region.
             (Default: 0)
         floor_weights : np.ndarray of shape (n,)
-            array containing weights for each observation used to determine
-            the region floor.
-            (Default: np.ones((n,)))
+            array containing weights for each observation used
+            to determine the region floor. (Default: np.ones((n,)))
         cut_method : str
             option governing what method to use to partition regions
             1. "gridsearch" (default): the hierarchical grid search
             suggested by Shi & Malik (2000); search the second eigenvector
             for the "best" partition in terms of cut weight.
-            2. "zero": cut the eigenvector at zero. Usually a passable solution,
-            since the second eigenvector is usually centered around zero.
-            3. "median": cut the eigenvector through its median. This means the
-            regions will always be divided into two halves with equal numbers
-            of elemental units.
+            2. "zero": cut the eigenvector at zero. Usually a
+            passable solution, since the second eigenvector is usually
+            centered around zero.
+            3. "median": cut the eigenvector through its median.
+            This means the regions will always be divided into two
+            halves with equal numbers of elemental units.
             "gridsearch" may be slow when grid_resolution is large.
             "zero" is the best method for large data.
         """
+
         if floor_weights is None:
             floor_weights = np.ones((self.affinity_matrix_.shape[0],))
         if spar.issparse(self.affinity_matrix_):
@@ -471,7 +479,7 @@ def _make_hierarchical_cut(
         """
 
         def mkobjective(second_eigenvector):
-            """This makes a closure around the objective function given an eigenvector"""
+            """Makes a closure around the objective function given an eigenvector."""
 
             def objective(cutpoint):
                 cut = second_eigenvector <= cutpoint
@@ -556,26 +564,32 @@ def _sample_gen(self, W, n_samples=1, affinity="rbf", distribution=None, **fit_k
         """
         NOTE: this is the lazy generator version of sample
         Compute random clusters using random eigenvector decomposition.
-        This uses random weights in spectral decomposition to generate approximately-evenly populated
-        random subgraphs from W.
+        This uses random weights in spectral decomposition to generate
+        approximately-evenly populated random subgraphs from W.
 
         Parameters
         ----------
         W : np.ndarray or scipy.sparse matrix
-            matrix encoding the spatial relationships between observations in the frame.
-            Must be strictly binary & connected to result in connected graphs correct behavior.
-            Mathematical properties of randomregions are undefined if not.
+            matrix encoding the spatial relationships between
+            observations in the frame. Must be strictly binary &
+            connected to guarantee connected graphs and correct behavior.
+            Mathematical properties of randomregions are
+            undefined if not.
         n_samples : int, default 1
             integer describing how many samples to construct
         affinity : string or callable, default is 'rbf'
-            passed down to the underlying SPENC class when spectral spatial clusters are found.
+            passed down to the underlying SPENC class when spectral
+            spatial clusters are found.
         distribution : callable default is numpy.random.normal(0,1, size=(N,1))
-            function when called with no arguments that draws the random weights used to
+            function when called with no arguments that draws
+            the random weights used to
             generate the random regions. Must align with W.
         spenc_parameters : keyword arguments
-            extra arguments passed down to the SPENC class for further customization.
+            extra arguments passed down to the SPENC class
+            for further customization.
         """
+
         if distribution is None:
             distribution = lambda: np.random.normal(0, 1, size=(W.shape[0], 1))
         else:
@@ -588,31 +602,36 @@ def sample(self, W, n_samples=1, distribution=None, **fit_kw):
         """
         Compute random clusters using random eigenvector decomposition.
-        This uses random weights in spectral decomposition to generate approximately-evenly populated
-        random subgraphs from W.
+        This uses random weights in spectral decomposition to generate
+        approximately-evenly populated random subgraphs from W.
 
         Parameters
         ----------
         W : np.ndarray or scipy.sparse matrix
-            matrix encoding the spatial relationships between observations in the frame.
-            Must be strictly binary & connected to result in connected graphs correct behavior.
+            matrix encoding the spatial relationships between
+            observations in the frame. Must be strictly binary &
+            connected to guarantee connected graphs and correct behavior.
             Mathematical properties of randomregions are undefined if not.
         n_samples : int, default 1
             integer describing how many samples to construct
         affinity : string or callable, default is 'rbf'
-            passed down to the underlying SPENC class when spectral spatial clusters are found.
+            passed down to the underlying SPENC class
+            when spectral spatial clusters are found.
         distribution : callable default is numpy.random.normal(0,1, size=(N,1))
-            function when called with no arguments that draws the random weights used to
+            function when called with no arguments that
+            draws the random weights used to
             generate the random regions. Must align with W.
         fit_kw : keyword arguments
-            extra arguments passed down to the SPENC class for further customization.
+            extra arguments passed down to the SPENC
+            class for further customization.
 
         Returns
         -------
 
         labels corresponding to the input W that are generated at random.
-        """
+        """  # noqa E501
+
         result = np.vstack(
             [
                 labels
@@ -652,21 +671,24 @@ def sample(self, n_samples=1, distribution=None):
         ----------
 
         W : np.ndarray or scipy.sparse matrix
-            matrix encoding the spatial relationships between observations in the frame.
-            Must be strictly binary & connected to result in connected graphs correct behavior.
-            Mathematical properties of randomregions are undefined if not.
+            matrix encoding the spatial relationships between
+            observations in the frame. Must be strictly binary &
+            connected to guarantee connected graphs and correct behavior.
+            Mathematical properties of randomregions
+            are undefined if not.
         n_samples : int
             integer describing how many samples to construct
         distribution : callable (default: np.random.normal(0,1))
-            a function that, when called with no arguments, returns the weights
-            used as fake data to randomize the graph.
+            a function that, when called with no arguments, returns
+            the weights used as fake data to randomize the graph.
 
         Returns
         -------
 
         labels corresponding to the input W that are generated at random.
- """ + """ # noqa E501 + return np.vstack( [ labels diff --git a/spopt/tests/test_azp.py b/spopt/tests/test_azp.py index 28fcf438..c432ba47 100644 --- a/spopt/tests/test_azp.py +++ b/spopt/tests/test_azp.py @@ -1,9 +1,7 @@ import libpysal import geopandas import numpy -import pytest -import spopt from spopt.region import AZP @@ -25,7 +23,9 @@ def setup_method(self): # labels for: # n_clusters=3, simulated annealing AZP variant - # self.simann_from_w_labels = [0, 0, 0, 0, 0, 1, 2, 1, 1, 2, 2, 0, 2, 1, 1, 1, 1] + # self.simann_from_w_labels = [ + # 0, 0, 0, 0, 0, 1, 2, 1, 1, 2, 2, 0, 2, 1, 1, 1, 1 + # ] # self.simann_from_w_labels += [1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 2, 2, 2, 2] def test_azp_basic_from_w(self): diff --git a/spopt/tests/test_clscp-so.py b/spopt/tests/test_clscp-so.py index ab86b5c9..8500a37e 100644 --- a/spopt/tests/test_clscp-so.py +++ b/spopt/tests/test_clscp-so.py @@ -1,16 +1,11 @@ -from pyproj import crs -from spopt.locate.base import FacilityModelBuilder, LocateSolver, T_FacModel import numpy import geopandas -import pandas import pulp import spaghetti -from shapely.geometry import Point, Polygon from spopt.locate import LSCP from spopt.locate.util import simulated_geo_points import pytest -import warnings class TestSyntheticLocate: @@ -120,7 +115,7 @@ def test_clscpso_dem_gt_cap_error(self): with pytest.raises( ValueError, match="Infeasible model. Demand greater than capacity" ): - clscpso = LSCP.from_cost_matrix( + LSCP.from_cost_matrix( self.cost_matrix, service_radius, facility_capacity_arr=facility_capacity, diff --git a/spopt/tests/test_lscp.py b/spopt/tests/test_lscp.py index 947c2837..41db6e20 100644 --- a/spopt/tests/test_lscp.py +++ b/spopt/tests/test_lscp.py @@ -1,5 +1,4 @@ -from pyproj import crs -from spopt.locate.base import FacilityModelBuilder, LocateSolver, T_FacModel +from spopt.locate.base import FacilityModelBuilder import numpy import geopandas import pandas diff --git a/spopt/tests/test_lscpb.py b/spopt/tests/test_lscpb.py index 171feeb8..d98b6a75 100644 --- a/spopt/tests/test_lscpb.py +++ b/spopt/tests/test_lscpb.py @@ -6,7 +6,7 @@ from shapely.geometry import Point, Polygon from spopt.locate import LSCPB -from spopt.locate.base import FacilityModelBuilder, LocateSolver +from spopt.locate.base import FacilityModelBuilder from spopt.locate.util import simulated_geo_points import os diff --git a/spopt/tests/test_maxp.py b/spopt/tests/test_maxp.py index 69c70693..2640ab0a 100644 --- a/spopt/tests/test_maxp.py +++ b/spopt/tests/test_maxp.py @@ -2,10 +2,14 @@ import libpysal import numpy import pytest -from shapely.geometry import Polygon, box +from shapely.geometry import box from spopt.region import MaxPHeuristic -from spopt.region.maxp import infeasible_components, modify_components, plot_components -from spopt.region.base import form_single_component +from spopt.region.base import ( + form_single_component, + infeasible_components, + modify_components, + plot_components, +) # Mexican states @@ -38,12 +42,12 @@ def setup_method(self): n_rows = 10 b = 0 h = w = 10 - component_0 = [box(l * w, b, l * w + w, b + h) for l in range(n_cols)] + component_0 = [box(_l * w, b, _l * w + w, b + h) for _l in range(n_cols)] b = b + h * 2 component_1 = [ - box(l * w, b + h * r, l * w + w, b + h + h * r) - for r in range(n_rows) - for l in range(n_cols) + box(_l * w, b + h * _r, _l * w + w, b + h + h * _r) + for _r in range(n_rows) + for _l in range(n_cols) ] geometries = component_0 + component_1 diff --git a/spopt/tests/test_mclp.py b/spopt/tests/test_mclp.py 
index 726bb1d2..00869243 100644 --- a/spopt/tests/test_mclp.py +++ b/spopt/tests/test_mclp.py @@ -1,5 +1,4 @@ -from pyproj import crs -from spopt.locate.base import FacilityModelBuilder, LocateSolver, T_FacModel +from spopt.locate.base import FacilityModelBuilder import numpy import geopandas import pandas @@ -419,7 +418,7 @@ def test_attribute_error_add_facility_constraint(self): with pytest.raises(AttributeError, match="Before setting facility constraint"): dummy_class = MCLP("dummy", pulp.LpProblem("name")) dummy_p_facility = 1 - FacilityModelBuilder.add_facility_constraint(dummy_class, 1) + FacilityModelBuilder.add_facility_constraint(dummy_class, dummy_p_facility) def test_attribute_error_add_maximal_coverage_constraint(self): with pytest.raises( diff --git a/spopt/tests/test_p_center.py b/spopt/tests/test_p_center.py index ca0bf447..c489b948 100644 --- a/spopt/tests/test_p_center.py +++ b/spopt/tests/test_p_center.py @@ -1,5 +1,4 @@ -from pyproj import crs -from spopt.locate.base import FacilityModelBuilder, LocateSolver, T_FacModel +from spopt.locate.base import FacilityModelBuilder import numpy import geopandas import pandas diff --git a/spopt/tests/test_p_dispersion.py b/spopt/tests/test_p_dispersion.py index 73669fc3..1cd34b9c 100644 --- a/spopt/tests/test_p_dispersion.py +++ b/spopt/tests/test_p_dispersion.py @@ -1,17 +1,15 @@ -from pyproj import crs -from spopt.locate.base import FacilityModelBuilder, LocateSolver, T_FacModel +from spopt.locate.base import FacilityModelBuilder import numpy import geopandas import pandas import pulp import spaghetti -from shapely.geometry import Point, Polygon +from shapely.geometry import Polygon from spopt.locate import PDispersion from spopt.locate.util import simulated_geo_points import os -import pickle import platform import pytest diff --git a/spopt/tests/test_p_median.py b/spopt/tests/test_p_median.py index c5ca786b..07aa2faf 100644 --- a/spopt/tests/test_p_median.py +++ b/spopt/tests/test_p_median.py @@ -1,5 +1,4 @@ -from pyproj import crs -from spopt.locate.base import FacilityModelBuilder, LocateSolver, T_FacModel +from spopt.locate.base import FacilityModelBuilder import numpy import geopandas import pandas diff --git a/spopt/tests/test_random_regions.py b/spopt/tests/test_random_regions.py index 9229ba5f..49d62ccd 100644 --- a/spopt/tests/test_random_regions.py +++ b/spopt/tests/test_random_regions.py @@ -18,7 +18,7 @@ SYNTH_IDS = SYNTH_W.id_order -# Empirical tests ------------------------------------------------------------------------ +# Empirical tests --------------------------------------------------------------------- class TestRandomRegionEmpirical: def setup_method(self): self.mexico = MEXICO.copy() @@ -93,7 +93,7 @@ def test_random_regions_6_card(self): ) -# Synthetic tests ------------------------------------------------------------------------ +# Synthetic tests --------------------------------------------------------------------- class TestRandomRegionSynthetic: def setup_method(self): self.nregs = N_REGIONS diff --git a/spopt/tests/test_region_util.py b/spopt/tests/test_region_util.py index 59761dda..b68eb9f3 100644 --- a/spopt/tests/test_region_util.py +++ b/spopt/tests/test_region_util.py @@ -2,7 +2,6 @@ import geopandas import networkx import numpy -import pulp import pytest import spopt.region.util as util diff --git a/spopt/tests/test_spenc.py b/spopt/tests/test_spenc.py index 0435d611..583e8ce4 100644 --- a/spopt/tests/test_spenc.py +++ b/spopt/tests/test_spenc.py @@ -1,7 +1,6 @@ import geopandas import libpysal 
import numpy -import pytest from spopt.region import Spenc diff --git a/spopt/tests/test_ward.py b/spopt/tests/test_ward.py index 1bdfe1be..f4f750ed 100644 --- a/spopt/tests/test_ward.py +++ b/spopt/tests/test_ward.py @@ -1,7 +1,6 @@ import geopandas import libpysal import numpy -import pytest from spopt.region import WardSpatial From 13b0eb700f746984e135f6711ff5a07af6865f53 Mon Sep 17 00:00:00 2001 From: James Gaboardi Date: Sat, 25 Feb 2023 20:39:22 -0500 Subject: [PATCH 02/16] remove versioneer --- .pre-commit-config.yaml | 14 +- spopt/__init__.py | 9 +- spopt/_version.py | 693 ------------ versioneer.py | 2264 --------------------------------------- 4 files changed, 16 insertions(+), 2964 deletions(-) delete mode 100644 spopt/_version.py delete mode 100644 versioneer.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 11cc6c2f..e976cca4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,14 @@ +files: 'spopt\/' repos: -- repo: https://github.com/psf/black + - repo: https://github.com/psf/black rev: 23.1.0 hooks: - - id: black - language_version: python3 + - id: black + language_version: python3 + - repo: https://github.com/charliermarsh/ruff-pre-commit + rev: "v0.0.252" + hooks: + - id: ruff + +ci: + autofix_prs: false diff --git a/spopt/__init__.py b/spopt/__init__.py index 2e085e5d..c34df198 100644 --- a/spopt/__init__.py +++ b/spopt/__init__.py @@ -1,6 +1,7 @@ -from . import locate -from . import region +import contextlib +from importlib.metadata import PackageNotFoundError, version -from . import _version +from . import locate, region -__version__ = _version.get_versions()["version"] +with contextlib.suppress(PackageNotFoundError): + __version__ = version("spopt") diff --git a/spopt/_version.py b/spopt/_version.py deleted file mode 100644 index a8006fdd..00000000 --- a/spopt/_version.py +++ /dev/null @@ -1,693 +0,0 @@ -# This file helps to compute a version number in source trees obtained from -# git-archive tarball (such as those provided by githubs download-from-tag -# feature). Distribution tarballs (built by setup.py sdist) and build -# directories (produced by setup.py build) will contain a much shorter file -# that just contains the computed version number. - -# This file is released into the public domain. -# Generated by versioneer-0.28 -# https://github.com/python-versioneer/python-versioneer - -"""Git implementation of _version.py.""" - -import errno -import os -import re -import subprocess -import sys -from typing import Callable, Dict -import functools - - -def get_keywords(): - """Get the keywords needed to look up the version information.""" - # these strings will be replaced by git during git-archive. - # setup.py/versioneer.py will grep for the variable names, so they must - # each be defined on a line of their own. _version.py will just call - # get_keywords(). 
- git_refnames = "$Format:%d$" - git_full = "$Format:%H$" - git_date = "$Format:%ci$" - keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} - return keywords - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_config(): - """Create, populate and return the VersioneerConfig() object.""" - # these strings are filled in when 'setup.py versioneer' creates - # _version.py - cfg = VersioneerConfig() - cfg.VCS = "git" - cfg.style = "pep440" - cfg.tag_prefix = "v" - cfg.parentdir_prefix = "spopt-" - cfg.versionfile_source = "spopt/_version.py" - cfg.verbose = False - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -LONG_VERSION_PY: Dict[str, str] = {} -HANDLERS: Dict[str, Dict[str, Callable]] = {} - - -def register_vcs_handler(vcs, method): # decorator - """Create decorator to mark a method as the handler of a VCS.""" - - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - process = None - - popen_kwargs = {} - if sys.platform == "win32": - # This hides the console window if pythonw.exe is used - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - popen_kwargs["startupinfo"] = startupinfo - - for command in commands: - try: - dispcmd = str([command] + args) - # remember shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen( - [command] + args, - cwd=cwd, - env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr else None), - **popen_kwargs, - ) - break - except OSError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %s" % dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %s" % (commands,)) - return None, None - stdout = process.communicate()[0].strip().decode() - if process.returncode != 0: - if verbose: - print("unable to run %s (error)" % dispcmd) - print("stdout was %s" % stdout) - return None, process.returncode - return stdout, process.returncode - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for _ in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return { - "version": dirname[len(parentdir_prefix) :], - "full-revisionid": None, - "dirty": False, - "error": None, - "date": None, - } - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print( - "Tried directories %s but none started with prefix %s" - % (str(rootdirs), parentdir_prefix) - ) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. 
When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. - keywords = {} - try: - with open(versionfile_abs, "r") as fobj: - for line in fobj: - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - except OSError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if "refnames" not in keywords: - raise NotThisMethod("Short version file found") - date = keywords.get("date") - if date is not None: - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - - # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = {r.strip() for r in refnames.strip("()").split(",")} - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)} - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. The old git %d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = {r for r in refs if re.search(r"\d", r)} - if verbose: - print("discarding '%s', no digits" % ",".join(refs - tags)) - if verbose: - print("likely tags: %s" % ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. 
"2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix) :] - # Filter out refs that exactly match prefix or that don't start - # with a number once the prefix is stripped (mostly a concern - # when prefix is '') - if not re.match(r"\d", r): - continue - if verbose: - print("picking %s" % r) - return { - "version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, - "error": None, - "date": date, - } - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return { - "version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, - "error": "no suitable tags", - "date": None, - } - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - # GIT_DIR can interfere with correct operation of Versioneer. - # It may be intended to be passed to the Versioneer-versioned project, - # but that should not change where we get our version from. - env = os.environ.copy() - env.pop("GIT_DIR", None) - runner = functools.partial(runner, env=env) - - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=not verbose) - if rc != 0: - if verbose: - print("Directory %s not under git control" % root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner( - GITS, - [ - "describe", - "--tags", - "--dirty", - "--always", - "--long", - "--match", - f"{tag_prefix}[[:digit:]]*", - ], - cwd=root, - ) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root) - # --abbrev-ref was added in git-1.6.3 - if rc != 0 or branch_name is None: - raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") - branch_name = branch_name.strip() - - if branch_name == "HEAD": - # If we aren't exactly on a branch, pick a branch which represents - # the current commit. If all else fails, we are on a branchless - # commit. - branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) - # --contains was added in git-1.5.4 - if rc != 0 or branches is None: - raise NotThisMethod("'git branch --contains' returned error") - branches = branches.split("\n") - - # Remove the first line if we're running detached - if "(" in branches[0]: - branches.pop(0) - - # Strip off the leading "* " from the list of branches. - branches = [branch[2:] for branch in branches] - if "master" in branches: - branch_name = "master" - elif not branches: - branch_name = None - else: - # Pick the first branch that is returned. 
Good or bad. - branch_name = branches[0] - - pieces["branch"] = branch_name - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[: git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) - if not mo: - # unparsable. Maybe git-describe is misbehaving? - pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%s' doesn't start with prefix '%s'" - print(fmt % (full_tag, tag_prefix)) - pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( - full_tag, - tag_prefix, - ) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix) :] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) - pieces["distance"] = len(out.split()) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_branch(pieces): - """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . - - The ".dev0" means not master branch. Note that .dev0 sorts backwards - (a feature branch will appear "older" than the master branch). - - Exceptions: - 1: no tags. 
0[.dev0]+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0" - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def pep440_split_post(ver): - """Split pep440 version string at the post-release segment. - - Returns the release segments before the post-release and the - post-release version number (or -1 if no post-release segment is present). - """ - vc = str.split(ver, ".post") - return vc[0], int(vc[1] or 0) if len(vc) == 2 else None - - -def render_pep440_pre(pieces): - """TAG[.postN.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post0.devDISTANCE - """ - if pieces["closest-tag"]: - if pieces["distance"]: - # update the post release segment - tag_version, post_version = pep440_split_post(pieces["closest-tag"]) - rendered = tag_version - if post_version is not None: - rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"]) - else: - rendered += ".post0.dev%d" % (pieces["distance"]) - else: - # no commits, use the tag as the version - rendered = pieces["closest-tag"] - else: - # exception #1 - rendered = "0.post0.dev%d" % pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - return rendered - - -def render_pep440_post_branch(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . - - The ".dev0" means not master branch. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 
0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return { - "version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None, - } - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-branch": - rendered = render_pep440_branch(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-post-branch": - rendered = render_pep440_post_branch(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%s'" % style) - - return { - "version": rendered, - "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], - "error": None, - "date": pieces.get("date"), - } - - -def get_versions(): - """Get version information or return default if unable to do so.""" - # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have - # __file__, we can work backwards from there to the root. Some - # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which - # case we can only use expanded keywords. - - cfg = get_config() - verbose = cfg.verbose - - try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) - except NotThisMethod: - pass - - try: - root = os.path.realpath(__file__) - # versionfile_source is the relative path from the top of the source - # tree (where the .git directory might live) to this file. Invert - # this to find the root from __file__. 
- for _ in cfg.versionfile_source.split("/"): - root = os.path.dirname(root) - except NameError: - return { - "version": "0+unknown", - "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None, - } - - try: - pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) - return render(pieces, cfg.style) - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - except NotThisMethod: - pass - - return { - "version": "0+unknown", - "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", - "date": None, - } diff --git a/versioneer.py b/versioneer.py deleted file mode 100644 index ccc663be..00000000 --- a/versioneer.py +++ /dev/null @@ -1,2264 +0,0 @@ -# Version: 0.28 - -"""The Versioneer - like a rocketeer, but for versions. - -The Versioneer -============== - -* like a rocketeer, but for versions! -* https://github.com/python-versioneer/python-versioneer -* Brian Warner -* License: Public Domain (Unlicense) -* Compatible with: Python 3.7, 3.8, 3.9, 3.10 and pypy3 -* [![Latest Version][pypi-image]][pypi-url] -* [![Build Status][travis-image]][travis-url] - -This is a tool for managing a recorded version number in setuptools-based -python projects. The goal is to remove the tedious and error-prone "update -the embedded version string" step from your release process. Making a new -release should be as easy as recording a new tag in your version-control -system, and maybe making new tarballs. - - -## Quick Install - -Versioneer provides two installation modes. The "classic" vendored mode installs -a copy of versioneer into your repository. The experimental build-time dependency mode -is intended to allow you to skip this step and simplify the process of upgrading. 
- -### Vendored mode - -* `pip install versioneer` to somewhere in your $PATH - * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is - available, so you can also use `conda install -c conda-forge versioneer` -* add a `[tool.versioneer]` section to your `pyproject.toml` or a - `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md)) - * Note that you will need to add `tomli; python_version < "3.11"` to your - build-time dependencies if you use `pyproject.toml` -* run `versioneer install --vendor` in your source tree, commit the results -* verify version information with `python setup.py version` - -### Build-time dependency mode - -* `pip install versioneer` to somewhere in your $PATH - * A [conda-forge recipe](https://github.com/conda-forge/versioneer-feedstock) is - available, so you can also use `conda install -c conda-forge versioneer` -* add a `[tool.versioneer]` section to your `pyproject.toml` or a - `[versioneer]` section to your `setup.cfg` (see [Install](INSTALL.md)) -* add `versioneer` (with `[toml]` extra, if configuring in `pyproject.toml`) - to the `requires` key of the `build-system` table in `pyproject.toml`: - ```toml - [build-system] - requires = ["setuptools", "versioneer[toml]"] - build-backend = "setuptools.build_meta" - ``` -* run `versioneer install --no-vendor` in your source tree, commit the results -* verify version information with `python setup.py version` - -## Version Identifiers - -Source trees come from a variety of places: - -* a version-control system checkout (mostly used by developers) -* a nightly tarball, produced by build automation -* a snapshot tarball, produced by a web-based VCS browser, like github's - "tarball from tag" feature -* a release tarball, produced by "setup.py sdist", distributed through PyPI - -Within each source tree, the version identifier (either a string or a number, -this tool is format-agnostic) can come from a variety of places: - -* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows - about recent "tags" and an absolute revision-id -* the name of the directory into which the tarball was unpacked -* an expanded VCS keyword ($Id$, etc) -* a `_version.py` created by some earlier build step - -For released software, the version identifier is closely related to a VCS -tag. Some projects use tag names that include more than just the version -string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool -needs to strip the tag prefix to extract the version identifier. For -unreleased software (between tags), the version identifier should provide -enough information to help developers recreate the same tree, while also -giving them an idea of roughly how old the tree is (after version 1.2, before -version 1.3). Many VCS systems can report a description that captures this, -for example `git describe --tags --dirty --always` reports things like -"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the -0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has -uncommitted changes). - -The version identifier is used for multiple purposes: - -* to allow the module to self-identify its version: `myproject.__version__` -* to choose a name and prefix for a 'setup.py sdist' tarball - -## Theory of Operation - -Versioneer works by adding a special `_version.py` file into your source -tree, where your `__init__.py` can import it. This `_version.py` knows how to -dynamically ask the VCS tool for version information at import time. 
- -`_version.py` also contains `$Revision$` markers, and the installation -process marks `_version.py` to have this marker rewritten with a tag name -during the `git archive` command. As a result, generated tarballs will -contain enough information to get the proper version. - -To allow `setup.py` to compute a version too, a `versioneer.py` is added to -the top level of your source tree, next to `setup.py` and the `setup.cfg` -that configures it. This overrides several distutils/setuptools commands to -compute the version when invoked, and changes `setup.py build` and `setup.py -sdist` to replace `_version.py` with a small static file that contains just -the generated version data. - -## Installation - -See [INSTALL.md](./INSTALL.md) for detailed installation instructions. - -## Version-String Flavors - -Code which uses Versioneer can learn about its version string at runtime by -importing `_version` from your main `__init__.py` file and running the -`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can -import the top-level `versioneer.py` and run `get_versions()`. - -Both functions return a dictionary with different flavors of version -information: - -* `['version']`: A condensed version string, rendered using the selected - style. This is the most commonly used value for the project's version - string. The default "pep440" style yields strings like `0.11`, - `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section - below for alternative styles. - -* `['full-revisionid']`: detailed revision identifier. For Git, this is the - full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". - -* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the - commit date in ISO 8601 format. This will be None if the date is not - available. - -* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that - this is only accurate if run in a VCS checkout, otherwise it is likely to - be False or None - -* `['error']`: if the version string could not be computed, this will be set - to a string describing the problem, otherwise it will be None. It may be - useful to throw an exception in setup.py if this is set, to avoid e.g. - creating tarballs with a version string of "unknown". - -Some variants are more useful than others. Including `full-revisionid` in a -bug report should allow developers to reconstruct the exact code being tested -(or indicate the presence of local changes that should be shared with the -developers). `version` is suitable for display in an "about" box or a CLI -`--version` output: it can be easily compared against release notes and lists -of bugs fixed in various releases. - -The installer adds the following text to your `__init__.py` to place a basic -version in `YOURPROJECT.__version__`: - - from ._version import get_versions - __version__ = get_versions()['version'] - del get_versions - -## Styles - -The setup.cfg `style=` configuration controls how the VCS information is -rendered into a version string. - -The default style, "pep440", produces a PEP440-compliant string, equal to the -un-prefixed tag name for actual releases, and containing an additional "local -version" section with more detail for in-between builds. For Git, this is -TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags ---dirty --always`. 
For example "0.11+2.g1076c97.dirty" indicates that the -tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and -that this commit is two revisions ("+2") beyond the "0.11" tag. For released -software (exactly equal to a known tag), the identifier will only contain the -stripped tag, e.g. "0.11". - -Other styles are available. See [details.md](details.md) in the Versioneer -source tree for descriptions. - -## Debugging - -Versioneer tries to avoid fatal errors: if something goes wrong, it will tend -to return a version of "0+unknown". To investigate the problem, run `setup.py -version`, which will run the version-lookup code in a verbose mode, and will -display the full contents of `get_versions()` (including the `error` string, -which may help identify what went wrong). - -## Known Limitations - -Some situations are known to cause problems for Versioneer. This details the -most significant ones. More can be found on Github -[issues page](https://github.com/python-versioneer/python-versioneer/issues). - -### Subprojects - -Versioneer has limited support for source trees in which `setup.py` is not in -the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are -two common reasons why `setup.py` might not be in the root: - -* Source trees which contain multiple subprojects, such as - [Buildbot](https://github.com/buildbot/buildbot), which contains both - "master" and "slave" subprojects, each with their own `setup.py`, - `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI - distributions (and upload multiple independently-installable tarballs). -* Source trees whose main purpose is to contain a C library, but which also - provide bindings to Python (and perhaps other languages) in subdirectories. - -Versioneer will look for `.git` in parent directories, and most operations -should get the right version string. However `pip` and `setuptools` have bugs -and implementation details which frequently cause `pip install .` from a -subproject directory to fail to find a correct version string (so it usually -defaults to `0+unknown`). - -`pip install --editable .` should work correctly. `setup.py install` might -work too. - -Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in -some later version. - -[Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking -this issue. The discussion in -[PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the -issue from the Versioneer side in more detail. -[pip PR#3176](https://github.com/pypa/pip/pull/3176) and -[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve -pip to let Versioneer work correctly. - -Versioneer-0.16 and earlier only looked for a `.git` directory next to the -`setup.cfg`, so subprojects were completely unsupported with those releases. - -### Editable installs with setuptools <= 18.5 - -`setup.py develop` and `pip install --editable .` allow you to install a -project into a virtualenv once, then continue editing the source code (and -test) without re-installing after every change. - -"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a -convenient way to specify executable scripts that should be installed along -with the python package. - -These both work as expected when using modern setuptools. 
When using
-setuptools-18.5 or earlier, however, certain operations will cause
-`pkg_resources.DistributionNotFound` errors when running the entrypoint
-script, which must be resolved by re-installing the package. This happens
-when the package is installed with one version, then the egg_info data is
-regenerated while a different version is checked out. Many setup.py commands
-cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into
-a different virtualenv), so this can be surprising.
-
-[Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes
-this one, but upgrading to a newer version of setuptools should probably
-resolve it.
-
-
-## Updating Versioneer
-
-To upgrade your project to a new release of Versioneer, do the following:
-
-* install the new Versioneer (`pip install -U versioneer` or equivalent)
-* edit `setup.cfg` and `pyproject.toml`, if necessary,
-  to include any new configuration settings indicated by the release notes.
-  See [UPGRADING](./UPGRADING.md) for details.
-* re-run `versioneer install --[no-]vendor` in your source tree, to replace
-  `SRC/_version.py`
-* commit any changed files
-
-## Future Directions
-
-This tool is designed to be easily extended to other version-control
-systems: all VCS-specific components are in separate directories like
-src/git/. The top-level `versioneer.py` script is assembled from these
-components by running make-versioneer.py. In the future, make-versioneer.py
-will take a VCS name as an argument, and will construct a version of
-`versioneer.py` that is specific to the given VCS. It might also take the
-configuration arguments that are currently provided manually during
-installation by editing setup.py. Alternatively, it might go the other
-direction and include code from all supported VCS systems, reducing the
-number of intermediate scripts.
-
-## Similar projects
-
-* [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time
-  dependency
-* [miniver](https://github.com/jbweston/miniver) - a lightweight reimplementation of
-  versioneer
-* [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools
-  plugin
-
-## License
-
-To make Versioneer easier to embed, all its code is dedicated to the public
-domain. The `_version.py` that it creates is also in the public domain.
-Specifically, both are released under the "Unlicense", as described in
-https://unlicense.org/.
-
-[pypi-image]: https://img.shields.io/pypi/v/versioneer.svg
-[pypi-url]: https://pypi.python.org/pypi/versioneer/
-[travis-image]:
-https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg
-[travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer
-
-"""
-# pylint:disable=invalid-name,import-outside-toplevel,missing-function-docstring
-# pylint:disable=missing-class-docstring,too-many-branches,too-many-statements
-# pylint:disable=raise-missing-from,too-many-lines,too-many-locals,import-error
-# pylint:disable=too-few-public-methods,redefined-outer-name,consider-using-with
-# pylint:disable=attribute-defined-outside-init,too-many-arguments
-
-import configparser
-import errno
-import json
-import os
-import re
-import subprocess
-import sys
-from pathlib import Path
-from typing import Callable, Dict
-import functools
-
-have_tomllib = True
-if sys.version_info >= (3, 11):
-    import tomllib
-else:
-    try:
-        import tomli as tomllib
-    except ImportError:
-        have_tomllib = False
-
-
-class VersioneerConfig:
-    """Container for Versioneer configuration parameters."""
-
-
-def get_root():
-    """Get the project root directory.
-
-    We require that all commands are run from the project root, i.e. the
-    directory that contains setup.py, setup.cfg, and versioneer.py.
-    """
-    root = os.path.realpath(os.path.abspath(os.getcwd()))
-    setup_py = os.path.join(root, "setup.py")
-    versioneer_py = os.path.join(root, "versioneer.py")
-    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
-        # allow 'python path/to/setup.py COMMAND'
-        root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0])))
-        setup_py = os.path.join(root, "setup.py")
-        versioneer_py = os.path.join(root, "versioneer.py")
-    if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)):
-        err = (
-            "Versioneer was unable to locate the project root directory. "
-            "Versioneer requires setup.py to be executed from "
-            "its immediate directory (like 'python setup.py COMMAND'), "
-            "or in a way that lets it use sys.argv[0] to find the root "
-            "(like 'python path/to/setup.py COMMAND')."
-        )
-        raise VersioneerBadRootError(err)
-    try:
-        # Certain runtime workflows (setup.py install/develop in a setuptools
-        # tree) execute all dependencies in a single python process, so
-        # "versioneer" may be imported multiple times, and python's shared
-        # module-import table will cache the first one. So we can't use
-        # os.path.dirname(__file__), as that will find whichever
-        # versioneer.py was first imported, even in later projects.
-        my_path = os.path.realpath(os.path.abspath(__file__))
-        me_dir = os.path.normcase(os.path.splitext(my_path)[0])
-        vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0])
-        if me_dir != vsr_dir and "VERSIONEER_PEP518" not in globals():
-            print(
-                "Warning: build in %s is using versioneer.py from %s"
-                % (os.path.dirname(my_path), versioneer_py)
-            )
-    except NameError:
-        pass
-    return root
-
-
-def get_config_from_root(root):
-    """Read the project's pyproject.toml or setup.cfg to determine Versioneer config."""
-    # This might raise OSError (if setup.cfg is missing), or
-    # configparser.NoSectionError (if it lacks a [versioneer] section), or
-    # configparser.NoOptionError (if it lacks "VCS="). See the docstring at
-    # the top of versioneer.py for instructions on writing your setup.cfg.
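-    # (The [tool.versioneer] table in pyproject.toml is consulted first when
-    # a TOML parser is available; setup.cfg is the fallback.)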
- root = Path(root) - pyproject_toml = root / "pyproject.toml" - setup_cfg = root / "setup.cfg" - section = None - if pyproject_toml.exists() and have_tomllib: - try: - with open(pyproject_toml, "rb") as fobj: - pp = tomllib.load(fobj) - section = pp["tool"]["versioneer"] - except (tomllib.TOMLDecodeError, KeyError): - pass - if not section: - parser = configparser.ConfigParser() - with open(setup_cfg) as cfg_file: - parser.read_file(cfg_file) - parser.get("versioneer", "VCS") # raise error if missing - - section = parser["versioneer"] - - cfg = VersioneerConfig() - cfg.VCS = section["VCS"] - cfg.style = section.get("style", "") - cfg.versionfile_source = section.get("versionfile_source") - cfg.versionfile_build = section.get("versionfile_build") - cfg.tag_prefix = section.get("tag_prefix") - if cfg.tag_prefix in ("''", '""', None): - cfg.tag_prefix = "" - cfg.parentdir_prefix = section.get("parentdir_prefix") - cfg.verbose = section.get("verbose") - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -# these dictionaries contain VCS-specific tools -LONG_VERSION_PY: Dict[str, str] = {} -HANDLERS: Dict[str, Dict[str, Callable]] = {} - - -def register_vcs_handler(vcs, method): # decorator - """Create decorator to mark a method as the handler of a VCS.""" - - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - HANDLERS.setdefault(vcs, {})[method] = f - return f - - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - process = None - - popen_kwargs = {} - if sys.platform == "win32": - # This hides the console window if pythonw.exe is used - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - popen_kwargs["startupinfo"] = startupinfo - - for command in commands: - try: - dispcmd = str([command] + args) - # remember shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen( - [command] + args, - cwd=cwd, - env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr else None), - **popen_kwargs, - ) - break - except OSError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %s" % dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %s" % (commands,)) - return None, None - stdout = process.communicate()[0].strip().decode() - if process.returncode != 0: - if verbose: - print("unable to run %s (error)" % dispcmd) - print("stdout was %s" % stdout) - return None, process.returncode - return stdout, process.returncode - - -LONG_VERSION_PY[ - "git" -] = r''' -# This file helps to compute a version number in source trees obtained from -# git-archive tarball (such as those provided by githubs download-from-tag -# feature). Distribution tarballs (built by setup.py sdist) and build -# directories (produced by setup.py build) will contain a much shorter file -# that just contains the computed version number. - -# This file is released into the public domain. 
-# Generated by versioneer-0.28 -# https://github.com/python-versioneer/python-versioneer - -"""Git implementation of _version.py.""" - -import errno -import os -import re -import subprocess -import sys -from typing import Callable, Dict -import functools - - -def get_keywords(): - """Get the keywords needed to look up the version information.""" - # these strings will be replaced by git during git-archive. - # setup.py/versioneer.py will grep for the variable names, so they must - # each be defined on a line of their own. _version.py will just call - # get_keywords(). - git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" - git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" - git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" - keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} - return keywords - - -class VersioneerConfig: - """Container for Versioneer configuration parameters.""" - - -def get_config(): - """Create, populate and return the VersioneerConfig() object.""" - # these strings are filled in when 'setup.py versioneer' creates - # _version.py - cfg = VersioneerConfig() - cfg.VCS = "git" - cfg.style = "%(STYLE)s" - cfg.tag_prefix = "%(TAG_PREFIX)s" - cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" - cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" - cfg.verbose = False - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -LONG_VERSION_PY: Dict[str, str] = {} -HANDLERS: Dict[str, Dict[str, Callable]] = {} - - -def register_vcs_handler(vcs, method): # decorator - """Create decorator to mark a method as the handler of a VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - return decorate - - -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - process = None - - popen_kwargs = {} - if sys.platform == "win32": - # This hides the console window if pythonw.exe is used - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - popen_kwargs["startupinfo"] = startupinfo - - for command in commands: - try: - dispcmd = str([command] + args) - # remember shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen([command] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None), **popen_kwargs) - break - except OSError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %%s" %% dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %%s" %% (commands,)) - return None, None - stdout = process.communicate()[0].strip().decode() - if process.returncode != 0: - if verbose: - print("unable to run %%s (error)" %% dispcmd) - print("stdout was %%s" %% stdout) - return None, process.returncode - return stdout, process.returncode - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. 
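-    (for example "myproject-1.2", matching a parentdir_prefix of "myproject-").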
We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for _ in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %%s but none started with prefix %%s" %% - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. - keywords = {} - try: - with open(versionfile_abs, "r") as fobj: - for line in fobj: - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - except OSError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if "refnames" not in keywords: - raise NotThisMethod("Short version file found") - date = keywords.get("date") - if date is not None: - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - - # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = {r.strip() for r in refnames.strip("()").split(",")} - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. The old git %%d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". - tags = {r for r in refs if re.search(r'\d', r)} - if verbose: - print("discarding '%%s', no digits" %% ",".join(refs - tags)) - if verbose: - print("likely tags: %%s" %% ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. 
"2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - # Filter out refs that exactly match prefix or that don't start - # with a number once the prefix is stripped (mostly a concern - # when prefix is '') - if not re.match(r'\d', r): - continue - if verbose: - print("picking %%s" %% r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - # GIT_DIR can interfere with correct operation of Versioneer. - # It may be intended to be passed to the Versioneer-versioned project, - # but that should not change where we get our version from. - env = os.environ.copy() - env.pop("GIT_DIR", None) - runner = functools.partial(runner, env=env) - - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=not verbose) - if rc != 0: - if verbose: - print("Directory %%s not under git control" %% root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner(GITS, [ - "describe", "--tags", "--dirty", "--always", "--long", - "--match", f"{tag_prefix}[[:digit:]]*" - ], cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], - cwd=root) - # --abbrev-ref was added in git-1.6.3 - if rc != 0 or branch_name is None: - raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") - branch_name = branch_name.strip() - - if branch_name == "HEAD": - # If we aren't exactly on a branch, pick a branch which represents - # the current commit. If all else fails, we are on a branchless - # commit. - branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) - # --contains was added in git-1.5.4 - if rc != 0 or branches is None: - raise NotThisMethod("'git branch --contains' returned error") - branches = branches.split("\n") - - # Remove the first line if we're running detached - if "(" in branches[0]: - branches.pop(0) - - # Strip off the leading "* " from the list of branches. - branches = [branch[2:] for branch in branches] - if "master" in branches: - branch_name = "master" - elif not branches: - branch_name = None - else: - # Pick the first branch that is returned. Good or bad. 
- branch_name = branches[0] - - pieces["branch"] = branch_name - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparsable. Maybe git-describe is misbehaving? - pieces["error"] = ("unable to parse git-describe output: '%%s'" - %% describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%%s' doesn't start with prefix '%%s'" - print(fmt %% (full_tag, tag_prefix)) - pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" - %% (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) - pieces["distance"] = len(out.split()) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_branch(pieces): - """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . - - The ".dev0" means not master branch. Note that .dev0 sorts backwards - (a feature branch will appear "older" than the master branch). - - Exceptions: - 1: no tags. 
0[.dev0]+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0" - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+untagged.%%d.g%%s" %% (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def pep440_split_post(ver): - """Split pep440 version string at the post-release segment. - - Returns the release segments before the post-release and the - post-release version number (or -1 if no post-release segment is present). - """ - vc = str.split(ver, ".post") - return vc[0], int(vc[1] or 0) if len(vc) == 2 else None - - -def render_pep440_pre(pieces): - """TAG[.postN.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post0.devDISTANCE - """ - if pieces["closest-tag"]: - if pieces["distance"]: - # update the post release segment - tag_version, post_version = pep440_split_post(pieces["closest-tag"]) - rendered = tag_version - if post_version is not None: - rendered += ".post%%d.dev%%d" %% (post_version + 1, pieces["distance"]) - else: - rendered += ".post0.dev%%d" %% (pieces["distance"]) - else: - # no commits, use the tag as the version - rendered = pieces["closest-tag"] - else: - # exception #1 - rendered = "0.post0.dev%%d" %% pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%%s" %% pieces["short"] - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%%s" %% pieces["short"] - return rendered - - -def render_pep440_post_branch(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . - - The ".dev0" means not master branch. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%%s" %% pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+g%%s" %% pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 
0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-branch": - rendered = render_pep440_branch(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-post-branch": - rendered = render_pep440_post_branch(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%%s'" %% style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -def get_versions(): - """Get version information or return default if unable to do so.""" - # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have - # __file__, we can work backwards from there to the root. Some - # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which - # case we can only use expanded keywords. - - cfg = get_config() - verbose = cfg.verbose - - try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, - verbose) - except NotThisMethod: - pass - - try: - root = os.path.realpath(__file__) - # versionfile_source is the relative path from the top of the source - # tree (where the .git directory might live) to this file. Invert - # this to find the root from __file__. 
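-        # e.g. a versionfile_source of "src/myproject/_version.py" makes this
-        # loop walk up one directory level per path component.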
- for _ in cfg.versionfile_source.split('/'): - root = os.path.dirname(root) - except NameError: - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None} - - try: - pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) - return render(pieces, cfg.style) - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - except NotThisMethod: - pass - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", "date": None} -''' - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. - keywords = {} - try: - with open(versionfile_abs, "r") as fobj: - for line in fobj: - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - except OSError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if "refnames" not in keywords: - raise NotThisMethod("Short version file found") - date = keywords.get("date") - if date is not None: - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - - # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = {r.strip() for r in refnames.strip("()").split(",")} - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)} - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. The old git %d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "master". 
- tags = {r for r in refs if re.search(r"\d", r)} - if verbose: - print("discarding '%s', no digits" % ",".join(refs - tags)) - if verbose: - print("likely tags: %s" % ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. "2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix) :] - # Filter out refs that exactly match prefix or that don't start - # with a number once the prefix is stripped (mostly a concern - # when prefix is '') - if not re.match(r"\d", r): - continue - if verbose: - print("picking %s" % r) - return { - "version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, - "error": None, - "date": date, - } - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return { - "version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, - "error": "no suitable tags", - "date": None, - } - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - # GIT_DIR can interfere with correct operation of Versioneer. - # It may be intended to be passed to the Versioneer-versioned project, - # but that should not change where we get our version from. - env = os.environ.copy() - env.pop("GIT_DIR", None) - runner = functools.partial(runner, env=env) - - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=not verbose) - if rc != 0: - if verbose: - print("Directory %s not under git control" % root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner( - GITS, - [ - "describe", - "--tags", - "--dirty", - "--always", - "--long", - "--match", - f"{tag_prefix}[[:digit:]]*", - ], - cwd=root, - ) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root) - # --abbrev-ref was added in git-1.6.3 - if rc != 0 or branch_name is None: - raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") - branch_name = branch_name.strip() - - if branch_name == "HEAD": - # If we aren't exactly on a branch, pick a branch which represents - # the current commit. If all else fails, we are on a branchless - # commit. 
- branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) - # --contains was added in git-1.5.4 - if rc != 0 or branches is None: - raise NotThisMethod("'git branch --contains' returned error") - branches = branches.split("\n") - - # Remove the first line if we're running detached - if "(" in branches[0]: - branches.pop(0) - - # Strip off the leading "* " from the list of branches. - branches = [branch[2:] for branch in branches] - if "master" in branches: - branch_name = "master" - elif not branches: - branch_name = None - else: - # Pick the first branch that is returned. Good or bad. - branch_name = branches[0] - - pieces["branch"] = branch_name - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[: git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) - if not mo: - # unparsable. Maybe git-describe is misbehaving? - pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%s' doesn't start with prefix '%s'" - print(fmt % (full_tag, tag_prefix)) - pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( - full_tag, - tag_prefix, - ) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix) :] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - out, rc = runner(GITS, ["rev-list", "HEAD", "--left-right"], cwd=root) - pieces["distance"] = len(out.split()) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def do_vcs_install(versionfile_source, ipy): - """Git-specific installation logic for Versioneer. - - For Git, this means creating/changing .gitattributes to mark _version.py - for export-subst keyword substitution. 
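-    Concretely: append a "<versionfile_source> export-subst" line to
-    .gitattributes (creating the file if needed) and stage the touched
-    files with "git add".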
- """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - files = [versionfile_source] - if ipy: - files.append(ipy) - if "VERSIONEER_PEP518" not in globals(): - try: - my_path = __file__ - if my_path.endswith((".pyc", ".pyo")): - my_path = os.path.splitext(my_path)[0] + ".py" - versioneer_file = os.path.relpath(my_path) - except NameError: - versioneer_file = "versioneer.py" - files.append(versioneer_file) - present = False - try: - with open(".gitattributes", "r") as fobj: - for line in fobj: - if line.strip().startswith(versionfile_source): - if "export-subst" in line.strip().split()[1:]: - present = True - break - except OSError: - pass - if not present: - with open(".gitattributes", "a+") as fobj: - fobj.write(f"{versionfile_source} export-subst\n") - files.append(".gitattributes") - run_command(GITS, ["add", "--"] + files) - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for _ in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return { - "version": dirname[len(parentdir_prefix) :], - "full-revisionid": None, - "dirty": False, - "error": None, - "date": None, - } - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print( - "Tried directories %s but none started with prefix %s" - % (str(rootdirs), parentdir_prefix) - ) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -SHORT_VERSION_PY = """ -# This file was generated by 'versioneer.py' (0.28) from -# revision-control system data, or from the parent directory name of an -# unpacked source archive. Distribution tarballs contain a pre-generated copy -# of this file. - -import json - -version_json = ''' -%s -''' # END VERSION_JSON - - -def get_versions(): - return json.loads(version_json) -""" - - -def versions_from_file(filename): - """Try to determine the version from _version.py if present.""" - try: - with open(filename) as f: - contents = f.read() - except OSError: - raise NotThisMethod("unable to read _version.py") - mo = re.search( - r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S - ) - if not mo: - mo = re.search( - r"version_json = '''\r\n(.*)''' # END VERSION_JSON", contents, re.M | re.S - ) - if not mo: - raise NotThisMethod("no version_json in _version.py") - return json.loads(mo.group(1)) - - -def write_to_version_file(filename, versions): - """Write the given version number to the given _version.py file.""" - os.unlink(filename) - contents = json.dumps(versions, sort_keys=True, indent=1, separators=(",", ": ")) - with open(filename, "w") as f: - f.write(SHORT_VERSION_PY % contents) - - print("set %s to '%s'" % (filename, versions["version"])) - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_branch(pieces): - """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . - - The ".dev0" means not master branch. Note that .dev0 sorts backwards - (a feature branch will appear "older" than the master branch). - - Exceptions: - 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0" - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def pep440_split_post(ver): - """Split pep440 version string at the post-release segment. - - Returns the release segments before the post-release and the - post-release version number (or -1 if no post-release segment is present). - """ - vc = str.split(ver, ".post") - return vc[0], int(vc[1] or 0) if len(vc) == 2 else None - - -def render_pep440_pre(pieces): - """TAG[.postN.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post0.devDISTANCE - """ - if pieces["closest-tag"]: - if pieces["distance"]: - # update the post release segment - tag_version, post_version = pep440_split_post(pieces["closest-tag"]) - rendered = tag_version - if post_version is not None: - rendered += ".post%d.dev%d" % (post_version + 1, pieces["distance"]) - else: - rendered += ".post0.dev%d" % (pieces["distance"]) - else: - # no commits, use the tag as the version - rendered = pieces["closest-tag"] - else: - # exception #1 - rendered = "0.post0.dev%d" % pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - return rendered - - -def render_pep440_post_branch(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . - - The ".dev0" means not master branch. - - Exceptions: - 1: no tags. 
0.postDISTANCE[.dev0]+gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["branch"] != "master": - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return { - "version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None, - } - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-branch": - rendered = render_pep440_branch(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-post-branch": - rendered = render_pep440_post_branch(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%s'" % style) - - return { - "version": rendered, - "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], - "error": None, - "date": pieces.get("date"), - } - - -class VersioneerBadRootError(Exception): - """The project root directory is unknown or missing key files.""" - - -def get_versions(verbose=False): - """Get the project version from whatever source is available. - - Returns dict with two keys: 'version' and 'full'. 
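-    (more precisely, the flavors described above: 'version',
-    'full-revisionid', 'dirty', 'error', and 'date').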
-    """
-    if "versioneer" in sys.modules:
-        # see the discussion in cmdclass.py:get_cmdclass()
-        del sys.modules["versioneer"]
-
-    root = get_root()
-    cfg = get_config_from_root(root)
-
-    assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg"
-    handlers = HANDLERS.get(cfg.VCS)
-    assert handlers, "unrecognized VCS '%s'" % cfg.VCS
-    verbose = verbose or cfg.verbose
-    assert (
-        cfg.versionfile_source is not None
-    ), "please set versioneer.versionfile_source"
-    assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix"
-
-    versionfile_abs = os.path.join(root, cfg.versionfile_source)
-
-    # extract version from first of: _version.py, VCS command (e.g. 'git
-    # describe'), parentdir. This is meant to work for developers using a
-    # source checkout, for users of a tarball created by 'setup.py sdist',
-    # and for users of a tarball/zipball created by 'git archive' or github's
-    # download-from-tag feature or the equivalent in other VCSes.
-
-    get_keywords_f = handlers.get("get_keywords")
-    from_keywords_f = handlers.get("keywords")
-    if get_keywords_f and from_keywords_f:
-        try:
-            keywords = get_keywords_f(versionfile_abs)
-            ver = from_keywords_f(keywords, cfg.tag_prefix, verbose)
-            if verbose:
-                print("got version from expanded keyword %s" % ver)
-            return ver
-        except NotThisMethod:
-            pass
-
-    try:
-        ver = versions_from_file(versionfile_abs)
-        if verbose:
-            print("got version from file %s %s" % (versionfile_abs, ver))
-        return ver
-    except NotThisMethod:
-        pass
-
-    from_vcs_f = handlers.get("pieces_from_vcs")
-    if from_vcs_f:
-        try:
-            pieces = from_vcs_f(cfg.tag_prefix, root, verbose)
-            ver = render(pieces, cfg.style)
-            if verbose:
-                print("got version from VCS %s" % ver)
-            return ver
-        except NotThisMethod:
-            pass
-
-    try:
-        if cfg.parentdir_prefix:
-            ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
-            if verbose:
-                print("got version from parentdir %s" % ver)
-            return ver
-    except NotThisMethod:
-        pass
-
-    if verbose:
-        print("unable to compute version")
-
-    return {
-        "version": "0+unknown",
-        "full-revisionid": None,
-        "dirty": None,
-        "error": "unable to compute version",
-        "date": None,
-    }
-
-
-def get_version():
-    """Get the short version string for this project."""
-    return get_versions()["version"]
-
-
-def get_cmdclass(cmdclass=None):
-    """Get the custom setuptools subclasses used by Versioneer.
-
-    If the package uses a different cmdclass (e.g. one from numpy), it
-    should be provided as an argument.
-    """
-    if "versioneer" in sys.modules:
-        del sys.modules["versioneer"]
-        # this fixes the "python setup.py develop" case (also 'install' and
-        # 'easy_install .'), in which subdependencies of the main project are
-        # built (using setup.py bdist_egg) in the same python process. Assume
-        # a main project A and a dependency B, which use different versions
-        # of Versioneer. A's setup.py imports A's Versioneer, leaving it in
-        # sys.modules by the time B's setup.py is executed, causing B to run
-        # with the wrong versioneer. Setuptools wraps the sub-dep builds in a
-        # sandbox that restores sys.modules to its pre-build state, so the
-        # parent is protected against the child's "import versioneer". By
-        # removing ourselves from sys.modules here, before the child build
-        # happens, we protect the child from the parent's versioneer too.
- # Also see https://github.com/python-versioneer/python-versioneer/issues/52 - - cmds = {} if cmdclass is None else cmdclass.copy() - - # we add "version" to setuptools - from setuptools import Command - - class cmd_version(Command): - description = "report generated version string" - user_options = [] - boolean_options = [] - - def initialize_options(self): - pass - - def finalize_options(self): - pass - - def run(self): - vers = get_versions(verbose=True) - print("Version: %s" % vers["version"]) - print(" full-revisionid: %s" % vers.get("full-revisionid")) - print(" dirty: %s" % vers.get("dirty")) - print(" date: %s" % vers.get("date")) - if vers["error"]: - print(" error: %s" % vers["error"]) - - cmds["version"] = cmd_version - - # we override "build_py" in setuptools - # - # most invocation pathways end up running build_py: - # distutils/build -> build_py - # distutils/install -> distutils/build ->.. - # setuptools/bdist_wheel -> distutils/install ->.. - # setuptools/bdist_egg -> distutils/install_lib -> build_py - # setuptools/install -> bdist_egg ->.. - # setuptools/develop -> ? - # pip install: - # copies source tree to a tempdir before running egg_info/etc - # if .git isn't copied too, 'git describe' will fail - # then does setup.py bdist_wheel, or sometimes setup.py install - # setup.py egg_info -> ? - - # pip install -e . and setuptool/editable_wheel will invoke build_py - # but the build_py command is not expected to copy any files. - - # we override different "build_py" commands for both environments - if "build_py" in cmds: - _build_py = cmds["build_py"] - else: - from setuptools.command.build_py import build_py as _build_py - - class cmd_build_py(_build_py): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - _build_py.run(self) - if getattr(self, "editable_mode", False): - # During editable installs `.py` and data files are - # not copied to build_lib - return - # now locate _version.py in the new build/ directory and replace - # it with an updated value - if cfg.versionfile_build: - target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build) - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - cmds["build_py"] = cmd_build_py - - if "build_ext" in cmds: - _build_ext = cmds["build_ext"] - else: - from setuptools.command.build_ext import build_ext as _build_ext - - class cmd_build_ext(_build_ext): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - _build_ext.run(self) - if self.inplace: - # build_ext --inplace will only build extensions in - # build/lib<..> dir with no _version.py to write to. - # As in place builds will already have a _version.py - # in the module dir, we do not need to write one. - return - # now locate _version.py in the new build/ directory and replace - # it with an updated value - if not cfg.versionfile_build: - return - target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build) - if not os.path.exists(target_versionfile): - print( - f"Warning: {target_versionfile} does not exist, skipping " - "version update. This can happen if you are running build_ext " - "without first running build_py." - ) - return - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - cmds["build_ext"] = cmd_build_ext - - if "cx_Freeze" in sys.modules: # cx_freeze enabled? 
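-        # The build_exe override below writes a static _version.py before
-        # freezing and restores the template file afterwards.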
- from cx_Freeze.dist import build_exe as _build_exe - - # nczeczulin reports that py2exe won't like the pep440-style string - # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. - # setup(console=[{ - # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION - # "product_version": versioneer.get_version(), - # ... - - class cmd_build_exe(_build_exe): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - target_versionfile = cfg.versionfile_source - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - _build_exe.run(self) - os.unlink(target_versionfile) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write( - LONG - % { - "DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - } - ) - - cmds["build_exe"] = cmd_build_exe - del cmds["build_py"] - - if "py2exe" in sys.modules: # py2exe enabled? - try: - from py2exe.setuptools_buildexe import py2exe as _py2exe - except ImportError: - from py2exe.distutils_buildexe import py2exe as _py2exe - - class cmd_py2exe(_py2exe): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - target_versionfile = cfg.versionfile_source - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - _py2exe.run(self) - os.unlink(target_versionfile) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write( - LONG - % { - "DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - } - ) - - cmds["py2exe"] = cmd_py2exe - - # sdist farms its file list building out to egg_info - if "egg_info" in cmds: - _egg_info = cmds["egg_info"] - else: - from setuptools.command.egg_info import egg_info as _egg_info - - class cmd_egg_info(_egg_info): - def find_sources(self): - # egg_info.find_sources builds the manifest list and writes it - # in one shot - super().find_sources() - - # Modify the filelist and normalize it - root = get_root() - cfg = get_config_from_root(root) - self.filelist.append("versioneer.py") - if cfg.versionfile_source: - # There are rare cases where versionfile_source might not be - # included by default, so we must be explicit - self.filelist.append(cfg.versionfile_source) - self.filelist.sort() - self.filelist.remove_duplicates() - - # The write method is hidden in the manifest_maker instance that - # generated the filelist and was thrown away - # We will instead replicate their final normalization (to unicode, - # and POSIX-style paths) - from setuptools import unicode_utils - - normalized = [ - unicode_utils.filesys_decode(f).replace(os.sep, "/") - for f in self.filelist.files - ] - - manifest_filename = os.path.join(self.egg_info, "SOURCES.txt") - with open(manifest_filename, "w") as fobj: - fobj.write("\n".join(normalized)) - - cmds["egg_info"] = cmd_egg_info - - # we override different "sdist" commands for both environments - if "sdist" in cmds: - _sdist = cmds["sdist"] - else: - from setuptools.command.sdist import sdist as _sdist - - class cmd_sdist(_sdist): - def run(self): - versions = get_versions() - self._versioneer_generated_versions = versions - # unless we update this, the command will keep using the old - # version - self.distribution.metadata.version = 
versions["version"] - return _sdist.run(self) - - def make_release_tree(self, base_dir, files): - root = get_root() - cfg = get_config_from_root(root) - _sdist.make_release_tree(self, base_dir, files) - # now locate _version.py in the new base_dir directory - # (remembering that it may be a hardlink) and replace it with an - # updated value - target_versionfile = os.path.join(base_dir, cfg.versionfile_source) - print("UPDATING %s" % target_versionfile) - write_to_version_file( - target_versionfile, self._versioneer_generated_versions - ) - - cmds["sdist"] = cmd_sdist - - return cmds - - -CONFIG_ERROR = """ -setup.cfg is missing the necessary Versioneer configuration. You need -a section like: - - [versioneer] - VCS = git - style = pep440 - versionfile_source = src/myproject/_version.py - versionfile_build = myproject/_version.py - tag_prefix = - parentdir_prefix = myproject- - -You will also need to edit your setup.py to use the results: - - import versioneer - setup(version=versioneer.get_version(), - cmdclass=versioneer.get_cmdclass(), ...) - -Please read the docstring in ./versioneer.py for configuration instructions, -edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. -""" - -SAMPLE_CONFIG = """ -# See the docstring in versioneer.py for instructions. Note that you must -# re-run 'versioneer.py setup' after changing this section, and commit the -# resulting files. - -[versioneer] -#VCS = git -#style = pep440 -#versionfile_source = -#versionfile_build = -#tag_prefix = -#parentdir_prefix = - -""" - -OLD_SNIPPET = """ -from ._version import get_versions -__version__ = get_versions()['version'] -del get_versions -""" - -INIT_PY_SNIPPET = """ -from . import {0} -__version__ = {0}.get_versions()['version'] -""" - - -def do_setup(): - """Do main VCS-independent setup function for installing Versioneer.""" - root = get_root() - try: - cfg = get_config_from_root(root) - except (OSError, configparser.NoSectionError, configparser.NoOptionError) as e: - if isinstance(e, (OSError, configparser.NoSectionError)): - print("Adding sample versioneer config to setup.cfg", file=sys.stderr) - with open(os.path.join(root, "setup.cfg"), "a") as f: - f.write(SAMPLE_CONFIG) - print(CONFIG_ERROR, file=sys.stderr) - return 1 - - print(" creating %s" % cfg.versionfile_source) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write( - LONG - % { - "DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - } - ) - - ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py") - if os.path.exists(ipy): - try: - with open(ipy, "r") as f: - old = f.read() - except OSError: - old = "" - module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0] - snippet = INIT_PY_SNIPPET.format(module) - if OLD_SNIPPET in old: - print(" replacing boilerplate in %s" % ipy) - with open(ipy, "w") as f: - f.write(old.replace(OLD_SNIPPET, snippet)) - elif snippet not in old: - print(" appending to %s" % ipy) - with open(ipy, "a") as f: - f.write(snippet) - else: - print(" %s unmodified" % ipy) - else: - print(" %s doesn't exist, ok" % ipy) - ipy = None - - # Make VCS-specific changes. For git, this means creating/changing - # .gitattributes to mark _version.py for export-subst keyword - # substitution. 
- do_vcs_install(cfg.versionfile_source, ipy) - return 0 - - -def scan_setup_py(): - """Validate the contents of setup.py against Versioneer's expectations.""" - found = set() - setters = False - errors = 0 - with open("setup.py", "r") as f: - for line in f.readlines(): - if "import versioneer" in line: - found.add("import") - if "versioneer.get_cmdclass()" in line: - found.add("cmdclass") - if "versioneer.get_version()" in line: - found.add("get_version") - if "versioneer.VCS" in line: - setters = True - if "versioneer.versionfile_source" in line: - setters = True - if len(found) != 3: - print("") - print("Your setup.py appears to be missing some important items") - print("(but I might be wrong). Please make sure it has something") - print("roughly like the following:") - print("") - print(" import versioneer") - print(" setup( version=versioneer.get_version(),") - print(" cmdclass=versioneer.get_cmdclass(), ...)") - print("") - errors += 1 - if setters: - print("You should remove lines like 'versioneer.VCS = ' and") - print("'versioneer.versionfile_source = ' . This configuration") - print("now lives in setup.cfg, and should be removed from setup.py") - print("") - errors += 1 - return errors - - -def setup_command(): - """Set up Versioneer and exit with appropriate error code.""" - errors = do_setup() - errors += scan_setup_py() - sys.exit(1 if errors else 0) - - -if __name__ == "__main__": - cmd = sys.argv[1] - if cmd == "setup": - setup_command() From dfc641d2e48a2766796992948d4570d53a9f58d1 Mon Sep 17 00:00:00 2001 From: James Gaboardi Date: Sat, 25 Feb 2023 20:40:52 -0500 Subject: [PATCH 03/16] adopt pyproject.toml --- .coveragerc | 31 ----------- .gitignore | 5 +- MANIFEST.in | 3 -- pyproject.toml | 106 +++++++++++++++++++++++++++++++++++++ requirements.txt | 10 ---- requirements_dev.txt | 3 -- requirements_docs.txt | 6 --- requirements_notebooks.txt | 8 --- requirements_tests.txt | 5 -- setup.cfg | 7 --- setup.py | 71 ------------------------- 11 files changed, 110 insertions(+), 145 deletions(-) delete mode 100644 .coveragerc delete mode 100644 MANIFEST.in create mode 100644 pyproject.toml delete mode 100644 requirements.txt delete mode 100644 requirements_dev.txt delete mode 100644 requirements_docs.txt delete mode 100644 requirements_notebooks.txt delete mode 100644 requirements_tests.txt delete mode 100644 setup.cfg delete mode 100644 setup.py diff --git a/.coveragerc b/.coveragerc deleted file mode 100644 index 989a4f67..00000000 --- a/.coveragerc +++ /dev/null @@ -1,31 +0,0 @@ -# .coveragerc to control coverage.py -[run] -branch = True -source = spopt - -[report] -# Regexes for lines to exclude from consideration -exclude_lines = - # Have to re-enable the standard pragma - pragma: no cover - - # Don't complain about missing debug-only code: - def __repr__ - if self\.debug - - # Don't complain if tests don't hit defensive assertion code: - raise AssertionError - raise NotImplementedError - - # Don't complain if non-runnable code isn't run: - if 0: - if __name__ == .__main__.: - -ignore_errors = True -omit = - */tests/* - *__init__.py - *_version.py - -[html] -directory = coverage_html_report diff --git a/.gitignore b/.gitignore index 16979746..b3df74f8 100644 --- a/.gitignore +++ b/.gitignore @@ -25,4 +25,7 @@ lib lib64 __pycache__ -.coverage \ No newline at end of file +.coverage + +.ruff_cache +.pytest_cache diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index 651ea795..00000000 --- a/MANIFEST.in +++ /dev/null @@ -1,3 +0,0 @@ -include LICENSE.txt 
MANIFEST.in requirements.txt requirements_docs.txt requirements_dev.txt requirements_tests.txt requirements_notebooks.txt -include versioneer.py -include spopt/_version.py diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..69d6fb5f --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,106 @@ +[build-system] +requires = ["setuptools>=61.0", "setuptools_scm[toml]>=6.2"] +build-backend = "setuptools.build_meta" + +[tool.setuptools_scm] + +[project] +name = "spopt" +dynamic = ["version"] +authors = [ + { name = "James D. Gaboardi", email = "jgaboardi@gmail.com" }, + { name = "Xin Feng", email = "xin.feng@ucr.edu" }, +] + +maintainers = [{ name = "spopt contributors" }] +license = { text = "BSD 3-Clause" } +description = "Spatial Optimization in PySAL" +keywords = ["spatial optimization"] +readme = { text = """\ +Spopt is an open-source Python library for solving optimization problems with spatial data. Originating from the `region` module in `PySAL`_ (Python Spatial Analysis Library), it is under active development for the inclusion of newly proposed models and methods for regionalization, facility location, and transportation-oriented solutions. + +.. _PySAL: http://pysal.org +""", content-type = "text/x-rst" } +classifiers = [ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: BSD License", + "Operating System :: OS Independent", + "Intended Audience :: Science/Research", + "Topic :: Scientific/Engineering :: GIS", +] +requires-python = ">=3.8" +dependencies = [ + "geopandas>=0.10.0", + "libpysal", + "mapclassify", + "networkx", + "numpy>=1.21", + "pandas>=1.0.5", + "pulp", + "scikit-learn>=0.22", + "scipy>=1.3.2", + "spaghetti", + "tqdm>=4.27.0", +] + + +[project.urls] +Home = "https://pysal.org/spopt/" +Repository = "https://github.com/pysal/spopt" + +[project.optional-dependencies] +tests = [ + "codecov", + "coverage", + "pytest", + "pytest-cov", + "pytest-xdist", +] +dev = ["pre-commit"] +docs = [ + "nbsphinx", + "numpydoc", + "pandoc", + "sphinx", + "sphinxcontrib-bibtex", + "sphinx_bootstrap_theme", +] +notebooks = [ + "folium", + "glpk", + "inequality", + "matplotlib", + "matplotlib-scalebar", + "overpy", + "routingpy", + "seaborn", + "watermark", +] + + +[tool.setuptools.packages.find] +include = ["spopt", "spopt.*"] + +[tool.black] +line-length = 88 + +[tool.ruff] +line-length = 88 +select = ["E", "F", "W", "I", "UP", "N", "B", "A", "C4", "SIM", "ARG"] +target-version = "py311" +ignore = ["B006", "F401", "F403"] +exclude = ["spopt/tests/*", "docs/*"] + +[tool.coverage.run] +source = ["spopt"] + +[tool.coverage.report] +exclude_lines = [ + "if self.debug:", + "pragma: no cover", + "raise NotImplementedError", + "except ModuleNotFoundError:", + "except ImportError", +] +ignore_errors = true +omit = ["spopt/tests/*", "docs/conf.py"] diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 816ccefb..00000000 --- a/requirements.txt +++ /dev/null @@ -1,10 +0,0 @@ -geopandas>=0.10.0 -libpysal -networkx -numpy>=1.21 -pandas>=1.0.5 -pulp -scikit-learn>=0.22 -scipy>=1.3.2 -spaghetti -tqdm>=4.27.0 diff --git a/requirements_dev.txt b/requirements_dev.txt deleted file mode 100644 index eb14f418..00000000 --- a/requirements_dev.txt +++ /dev/null @@ -1,3 +0,0 @@ -black -pre-commit -tqdm>=4.27.0 \ No newline at end of file diff --git a/requirements_docs.txt b/requirements_docs.txt deleted file mode 100644 index c291980a..00000000 --- a/requirements_docs.txt +++ /dev/null @@ -1,6 +0,0 @@ -nbsphinx -numpydoc -pandoc -sphinx 
-sphinxcontrib-bibtex -sphinx_bootstrap_theme \ No newline at end of file diff --git a/requirements_notebooks.txt b/requirements_notebooks.txt deleted file mode 100644 index 7783a703..00000000 --- a/requirements_notebooks.txt +++ /dev/null @@ -1,8 +0,0 @@ -folium -glpk -inequality -matplotlib -matplotlib-scalebar -overpy -routingpy -seaborn \ No newline at end of file diff --git a/requirements_tests.txt b/requirements_tests.txt deleted file mode 100644 index dc476d1b..00000000 --- a/requirements_tests.txt +++ /dev/null @@ -1,5 +0,0 @@ -codecov -coverage -pytest -pytest-cov -pytest-xdist diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 81ae2143..00000000 --- a/setup.cfg +++ /dev/null @@ -1,7 +0,0 @@ -[versioneer] -VCS = git -style = pep440 -versionfile_source = spopt/_version.py -versionfile_build = spopt/_version.py -tag_prefix = v -parentdir_prefix = spopt- diff --git a/setup.py b/setup.py deleted file mode 100644 index aba8483d..00000000 --- a/setup.py +++ /dev/null @@ -1,71 +0,0 @@ -from setuptools import setup, find_packages -import versioneer - -package = "spopt" # name of package - -# Fetch README.md for the `long_description` -with open("README.md", "r", encoding="utf-8") as file: - long_description = file.read() - - -def _get_requirements_from_files(groups_files): - groups_reqlist = {} - - for k, v in groups_files.items(): - with open(v, "r") as f: - pkg_list = f.read().splitlines() - groups_reqlist[k] = pkg_list - - return groups_reqlist - - -def setup_package(): - _groups_files = { - "base": "requirements.txt", # basic requirements - "docs": "requirements_docs.txt", # requirements for building docs - "dev": "requirements_dev.txt", # requirements for development - "tests": "requirements_tests.txt", # requirements for testing - "tests": "requirements_notebooks.txt", # requirements for notebooks/binders - } - reqs = _get_requirements_from_files(_groups_files) - install_reqs = reqs.pop("base") - extras_reqs = reqs - - setup( - name=package, # needed by GitHub dependency graph - version=versioneer.get_version(), - cmdclass=versioneer.get_cmdclass(), - description="Spatial Optimization in PySAL", - long_description=long_description, - long_description_content_type="text/markdown", - url="https://github.com/pysal/" + package, # github repo - download_url="https://pypi.org/project/" + package, - maintainer="PySAL Developers", - maintainer_email="xin.feng@ucr.edu, jgaboardi@gmail.com", - keywords="spatial optimization", - classifiers=[ - "Development Status :: 4 - Beta", - "Intended Audience :: Science/Research", - "Intended Audience :: Developers", - "Intended Audience :: Education", - "Topic :: Scientific/Engineering", - "Topic :: Scientific/Engineering :: GIS", - "License :: OSI Approved :: BSD License", - "Programming Language :: Python", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - ], - license="3-Clause BSD", - packages=find_packages(), - py_modules=[package], - install_requires=install_reqs, - extras_require=extras_reqs, - zip_safe=False, - python_requires=">=3.8", - ) - - -if __name__ == "__main__": - setup_package() From 7971097210f6b5a6214d3480b860ac00eb426eea Mon Sep 17 00:00:00 2001 From: James Gaboardi Date: Sat, 25 Feb 2023 20:42:22 -0500 Subject: [PATCH 04/16] clean up envs, etc. 
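
Context for the two preceding patches (removing versioneer and adopting
pyproject.toml with setuptools_scm): once versioneer's generated
_version.py is gone, the installed version string comes from the
distribution metadata that setuptools_scm writes at build time. A minimal
sketch of how a package __init__ can resolve it -- the importlib.metadata
lookup and the fallback value are illustrative assumptions here, not part
of these patches:

    # Illustrative sketch only: resolve the version from installed package
    # metadata instead of versioneer's generated _version.py.
    from importlib.metadata import PackageNotFoundError, version

    try:
        __version__ = version("spopt")  # written by setuptools_scm at build time
    except PackageNotFoundError:
        # running from an uninstalled source tree; mirrors the "0+unknown"
        # fallback that versioneer used above
        __version__ = "0+unknown"
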
--- .ci/310.yaml | 2 +- .ci/311-DEV.yaml | 2 +- .ci/311-DEV_shapely_dev.yaml | 23 --------------- .ci/311.yaml | 3 +- .ci/39.yaml | 2 +- .github/workflows/testing.yml | 3 +- .github/workflows/versioneer.yml | 50 -------------------------------- environment.yml | 3 +- 8 files changed, 7 insertions(+), 81 deletions(-) delete mode 100644 .ci/311-DEV_shapely_dev.yaml delete mode 100644 .github/workflows/versioneer.yml diff --git a/.ci/310.yaml b/.ci/310.yaml index 2b9d7dde..ca7df002 100644 --- a/.ci/310.yaml +++ b/.ci/310.yaml @@ -13,7 +13,7 @@ dependencies: - scipy>=1.3.2 - shapely - spaghetti - - tqdm=>4.27.0 + - tqdm>=4.27.0 # testing - codecov - coverage diff --git a/.ci/311-DEV.yaml b/.ci/311-DEV.yaml index c0b334fe..ad2789de 100644 --- a/.ci/311-DEV.yaml +++ b/.ci/311-DEV.yaml @@ -3,7 +3,7 @@ channels: - conda-forge dependencies: - python=3.11 - - geopandas>=0.10.0 + - geopandas - matplotlib - networkx - numpy>=1.21 diff --git a/.ci/311-DEV_shapely_dev.yaml b/.ci/311-DEV_shapely_dev.yaml deleted file mode 100644 index 88d91900..00000000 --- a/.ci/311-DEV_shapely_dev.yaml +++ /dev/null @@ -1,23 +0,0 @@ -name: test -channels: - - conda-forge - - conda-forge/label/shapely_dev -dependencies: - - python=3.11 - - geopandas>=0.12.0 - - matplotlib - - networkx - - numpy>=1.21 - - pandas>=1.0.5 - - pip - - scikit-learn>=0.22 - - scipy>=1.3.2 - - shapely>=2.0b1 - - spaghetti - - tqdm>=4.27.0 - # testing - - codecov - - coverage - - pytest - - pytest-cov - - pytest-xdist diff --git a/.ci/311.yaml b/.ci/311.yaml index b1fbdb0e..b8867826 100644 --- a/.ci/311.yaml +++ b/.ci/311.yaml @@ -3,7 +3,7 @@ channels: - conda-forge dependencies: - python=3.11 - - geopandas>=0.10.0 + - geopandas - libpysal - matplotlib - networkx @@ -11,7 +11,6 @@ dependencies: - pandas>=1.0.5 - scikit-learn>=0.22 - scipy>=1.3.2 - - shapely - spaghetti - tqdm=>4.27.0 # testing diff --git a/.ci/39.yaml b/.ci/39.yaml index bce520b2..1722ca04 100644 --- a/.ci/39.yaml +++ b/.ci/39.yaml @@ -3,7 +3,7 @@ channels: - conda-forge dependencies: - python=3.9 - - geopandas>=0.10.0 + - geopandas - libpysal - matplotlib - networkx diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 57504b5c..4ed604d6 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -16,7 +16,7 @@ runs-on: ${{ matrix.os }} timeout-minutes: 30 env: - RUN_TEST: pytest spopt -v -r a -n auto --cov spopt --cov-config .coveragerc --cov-report xml --color yes --cov-append --cov-report term-missing + RUN_TEST: pytest spopt -v -r a -n auto --cov spopt --cov-report xml --color yes --cov-append --cov-report term-missing strategy: matrix: os: [ubuntu-latest] @@ -26,7 +26,6 @@ .ci/310.yaml, .ci/311.yaml, .ci/311-DEV.yaml, - .ci/311-DEV_shapely_dev.yaml, ] include: - environment-file: .ci/311.yaml diff --git a/.github/workflows/versioneer.yml b/.github/workflows/versioneer.yml deleted file mode 100644 index 5005ee64..00000000 --- a/.github/workflows/versioneer.yml +++ /dev/null @@ -1,50 +0,0 @@ -# See https://github.com/python-versioneer/python-versioneer -name: "Update Versioneer" -on: - workflow_dispatch: - inputs: - version: - description: Manual Versioneer Run - default: test - required: false - schedule: - - cron: "0 6 1 * *" # 1st day of each month at 06:00 UTC - push: - paths: - - "setup.cfg" - - ".github/workflows/versioneer.yml" - -jobs: - versioneer: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v4 - with: - python-version: "3.10" - - name: Install and run versioneer - 
run: | - pip install versioneer - versioneer install - - name: Blacken code - uses: psf/black@stable - with: - options: "--verbose" - - name: Upload artifact - uses: actions/upload-artifact@v3 - with: - path: versioneer.py - - name: Ignore changes in __init__ - run: | - git reset -- spopt/__init__.py - git checkout -- spopt/__init__.py - - name: Create PR - uses: peter-evans/create-pull-request@v4 - with: - title: "Update Versioneer" - branch: update-versioneer - base: main - commit-message: "[Bot] Update Versioneer" - - body: | - Automatic update of Versioneer by the `versioneer.yml` workflow. diff --git a/environment.yml b/environment.yml index e36adbd5..ebf32189 100644 --- a/environment.yml +++ b/environment.yml @@ -3,7 +3,7 @@ channels: - conda-forge dependencies: - python=3.10 - - geopandas>=0.10.0 + - geopandas>=0.12.0 - jupyterlab - libpysal - mapclassify @@ -14,6 +14,7 @@ dependencies: - pulp - scikit-learn>=0.22 - scipy>=1.3.2 + - shapely>=2.0 - tqdm>=4.27.0 # notebook/binder specific From 199d9cd276cfe052a9f64855711a71c557b2829f Mon Sep 17 00:00:00 2001 From: James Gaboardi Date: Sat, 25 Feb 2023 20:44:41 -0500 Subject: [PATCH 05/16] update codecov --- codecov.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/codecov.yml b/codecov.yml index d587859b..3b714249 100644 --- a/codecov.yml +++ b/codecov.yml @@ -1,6 +1,6 @@ codecov: notify: - after_n_builds: 8 + after_n_builds: 7 coverage: range: 50..95 round: nearest @@ -18,5 +18,5 @@ coverage: comment: layout: "reach, diff, files" behavior: once - after_n_builds: 8 + after_n_builds: 7 require_changes: true From 31bd1caf97ec7cf1df726d90fe17159640fed73a Mon Sep 17 00:00:00 2001 From: James Gaboardi Date: Sun, 26 Feb 2023 16:51:36 -0500 Subject: [PATCH 06/16] test again master and main libpysal (temp) --- .ci/{311-DEV.yaml => 311-DEV-main.yaml} | 0 .ci/311-DEV-master.yaml | 21 +++++++++++++++++++++ .github/workflows/testing.yml | 14 ++++++++++---- 3 files changed, 31 insertions(+), 4 deletions(-) rename .ci/{311-DEV.yaml => 311-DEV-main.yaml} (100%) create mode 100644 .ci/311-DEV-master.yaml diff --git a/.ci/311-DEV.yaml b/.ci/311-DEV-main.yaml similarity index 100% rename from .ci/311-DEV.yaml rename to .ci/311-DEV-main.yaml diff --git a/.ci/311-DEV-master.yaml b/.ci/311-DEV-master.yaml new file mode 100644 index 00000000..ad2789de --- /dev/null +++ b/.ci/311-DEV-master.yaml @@ -0,0 +1,21 @@ +name: test +channels: + - conda-forge +dependencies: + - python=3.11 + - geopandas + - matplotlib + - networkx + - numpy>=1.21 + - pandas>=1.0.5 + - pip + - scikit-learn>=0.22 + - scipy>=1.3.2 + - spaghetti + - tqdm>=4.27.0 + # testing + - codecov + - coverage + - pytest + - pytest-cov + - pytest-xdist diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 4ed604d6..cec91122 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -16,7 +16,7 @@ runs-on: ${{ matrix.os }} timeout-minutes: 30 env: - RUN_TEST: pytest spopt -v -r a -n auto --cov spopt --cov-report xml --color yes --cov-append --cov-report term-missing + RUN_TEST: pytest spopt -v -r a -n auto --cov spopt --cov-report xml --color yes --cov-report term-missing strategy: matrix: os: [ubuntu-latest] @@ -25,7 +25,8 @@ .ci/39.yaml, .ci/310.yaml, .ci/311.yaml, - .ci/311-DEV.yaml, + .ci/311-DEV-master.yaml, + .ci/311-DEV-main.yaml, ] include: - environment-file: .ci/311.yaml @@ -49,10 +50,15 @@ shell: bash -l {0} run: pip install pulp - - name: install bleeding edge libpysal (only Ubuntu w/ latest Python) + - name: 
install bleeding edge libpysal@master (only Ubuntu w/ latest Python) shell: bash -l {0} run: pip install git+https://github.com/pysal/libpysal.git@master - if: matrix.os == 'ubuntu-latest' && contains(matrix.environment-file, 'DEV') + if: matrix.os == 'ubuntu-latest' && contains(matrix.environment-file, 'DEV-master') + + - name: install bleeding edge libpysal@main (only Ubuntu w/ latest Python) + shell: bash -l {0} + run: pip install git+https://github.com/pysal/libpysal.git@main + if: matrix.os == 'ubuntu-latest' && contains(matrix.environment-file, 'DEV-main') - name: environment info shell: bash -l {0} From 693ea8c0417049a97d86129d784f5fe9ce8e1877 Mon Sep 17 00:00:00 2001 From: James Gaboardi Date: Tue, 28 Feb 2023 14:53:51 -0500 Subject: [PATCH 07/16] only test dev against libpysal@main --- .ci/311-DEV-master.yaml | 21 -------------------- .ci/{311-DEV-main.yaml => 311-DEV.yaml} | 0 .github/workflows/testing.yml | 26 ++++++++++++++----------- 3 files changed, 15 insertions(+), 32 deletions(-) delete mode 100644 .ci/311-DEV-master.yaml rename .ci/{311-DEV-main.yaml => 311-DEV.yaml} (100%) diff --git a/.ci/311-DEV-master.yaml b/.ci/311-DEV-master.yaml deleted file mode 100644 index ad2789de..00000000 --- a/.ci/311-DEV-master.yaml +++ /dev/null @@ -1,21 +0,0 @@ -name: test -channels: - - conda-forge -dependencies: - - python=3.11 - - geopandas - - matplotlib - - networkx - - numpy>=1.21 - - pandas>=1.0.5 - - pip - - scikit-learn>=0.22 - - scipy>=1.3.2 - - spaghetti - - tqdm>=4.27.0 - # testing - - codecov - - coverage - - pytest - - pytest-cov - - pytest-xdist diff --git a/.ci/311-DEV-main.yaml b/.ci/311-DEV.yaml similarity index 100% rename from .ci/311-DEV-main.yaml rename to .ci/311-DEV.yaml diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index cec91122..04520cd2 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -16,7 +16,7 @@ runs-on: ${{ matrix.os }} timeout-minutes: 30 env: - RUN_TEST: pytest spopt -v -r a -n auto --cov spopt --cov-report xml --color yes --cov-report term-missing + RUN_TEST: pytest spopt -v -r a -n auto --cov spopt --color yes --cov-report term-missing --cov-append --cov-report xml . 
strategy: matrix: os: [ubuntu-latest] @@ -25,8 +25,7 @@ .ci/39.yaml, .ci/310.yaml, .ci/311.yaml, - .ci/311-DEV-master.yaml, - .ci/311-DEV-main.yaml, + .ci/311-DEV.yaml, ] include: - environment-file: .ci/311.yaml @@ -48,17 +47,12 @@ - name: install pulp via pip shell: bash -l {0} - run: pip install pulp - - - name: install bleeding edge libpysal@master (only Ubuntu w/ latest Python) - shell: bash -l {0} - run: pip install git+https://github.com/pysal/libpysal.git@master - if: matrix.os == 'ubuntu-latest' && contains(matrix.environment-file, 'DEV-master') + run: pip install pulp - name: install bleeding edge libpysal@main (only Ubuntu w/ latest Python) shell: bash -l {0} run: pip install git+https://github.com/pysal/libpysal.git@main - if: matrix.os == 'ubuntu-latest' && contains(matrix.environment-file, 'DEV-main') + if: matrix.os == 'ubuntu-latest' && contains(matrix.environment-file, 'DEV') - name: environment info shell: bash -l {0} @@ -85,4 +79,14 @@ with: token: ${{ secrets.CODECOV_TOKEN }} file: ./coverage.xml - name: spot-codecov + name: spopt-codecov + + - name: Generate and publish the report + if: | + failure() + && steps.status.outcome == 'failure' + && github.event_name == 'schedule' + && github.repository_owner == 'pysal' + uses: xarray-contrib/issue-from-pytest-log@v1 + with: + log-path: pytest-log.jsonl From 614eb06d31fd05430f95f88424d881890b6c9992 Mon Sep 17 00:00:00 2001 From: James Gaboardi Date: Tue, 28 Feb 2023 15:27:35 -0500 Subject: [PATCH 08/16] update other dev pins --- .ci/311-DEV.yaml | 8 +++++--- .github/workflows/testing.yml | 5 ----- 2 files changed, 5 insertions(+), 8 deletions(-) diff --git a/.ci/311-DEV.yaml b/.ci/311-DEV.yaml index ad2789de..868ffe0f 100644 --- a/.ci/311-DEV.yaml +++ b/.ci/311-DEV.yaml @@ -3,15 +3,12 @@ channels: - conda-forge dependencies: - python=3.11 - - geopandas - matplotlib - - networkx - numpy>=1.21 - pandas>=1.0.5 - pip - scikit-learn>=0.22 - scipy>=1.3.2 - - spaghetti - tqdm>=4.27.0 # testing - codecov @@ -19,3 +16,8 @@ dependencies: - pytest - pytest-cov - pytest-xdist + - pip: + - git+https://github.com/geopandas/geopandas.git + - git+https://github.com/networkx/networkx.git + - git+https://github.com/pysal/libpysal.git + - git+https://github.com/pysal/spaghetti.git diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index 04520cd2..a9a724ba 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -48,11 +48,6 @@ - name: install pulp via pip shell: bash -l {0} run: pip install pulp - - - name: install bleeding edge libpysal@main (only Ubuntu w/ latest Python) - shell: bash -l {0} - run: pip install git+https://github.com/pysal/libpysal.git@main - if: matrix.os == 'ubuntu-latest' && contains(matrix.environment-file, 'DEV') - name: environment info shell: bash -l {0} From 219330428be769e5488c36fedc0117eab0cb19e1 Mon Sep 17 00:00:00 2001 From: James Gaboardi Date: Tue, 28 Feb 2023 15:32:46 -0500 Subject: [PATCH 09/16] add mapclassify, etc. 
to dev env --- .ci/311-DEV.yaml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.ci/311-DEV.yaml b/.ci/311-DEV.yaml index 868ffe0f..7eb7849e 100644 --- a/.ci/311-DEV.yaml +++ b/.ci/311-DEV.yaml @@ -3,6 +3,7 @@ channels: - conda-forge dependencies: - python=3.11 + - folium - matplotlib - numpy>=1.21 - pandas>=1.0.5 @@ -20,4 +21,5 @@ dependencies: - git+https://github.com/geopandas/geopandas.git - git+https://github.com/networkx/networkx.git - git+https://github.com/pysal/libpysal.git + - git+https://github.com/pysal/mapclassify.git - git+https://github.com/pysal/spaghetti.git From d18503ccb425285aabb884b0453680a97018700e Mon Sep 17 00:00:00 2001 From: James Gaboardi Date: Tue, 28 Feb 2023 15:41:46 -0500 Subject: [PATCH 10/16] ensure all deps are installed in dev --- .ci/311-DEV.yaml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.ci/311-DEV.yaml b/.ci/311-DEV.yaml index 7eb7849e..a3daf75e 100644 --- a/.ci/311-DEV.yaml +++ b/.ci/311-DEV.yaml @@ -3,13 +3,18 @@ channels: - conda-forge dependencies: - python=3.11 + - geopandas - folium + - libpysal + - mapclassify - matplotlib + - networkx - numpy>=1.21 - pandas>=1.0.5 - pip - scikit-learn>=0.22 - scipy>=1.3.2 + - spaghetti - tqdm>=4.27.0 # testing - codecov From 57d1283b8105215885fbd47f23aa62e7c1d24240 Mon Sep 17 00:00:00 2001 From: James Gaboardi Date: Tue, 28 Feb 2023 22:51:50 -0500 Subject: [PATCH 11/16] re-ruff repo --- pyproject.toml | 16 ++++++- spopt/locate/__init__.py | 4 +- spopt/locate/base.py | 5 +-- spopt/locate/coverage.py | 16 +++---- spopt/locate/p_center.py | 7 ++- spopt/locate/p_dispersion.py | 12 ++--- spopt/locate/p_median.py | 13 +++--- spopt/locate/util.py | 13 +++--- spopt/region/__init__.py | 8 ++-- spopt/region/azp.py | 44 +++++++++---------- spopt/region/azp_util.py | 4 +- spopt/region/base.py | 13 +++--- spopt/region/components.py | 4 +- spopt/region/csgraph_utils.py | 4 +- spopt/region/maxp.py | 53 ++++++++++++---------- spopt/region/objective_function.py | 2 +- spopt/region/random_region.py | 60 ++++++++++++------------- spopt/region/region_k_means.py | 10 +++-- spopt/region/skater.py | 30 ++++++------- spopt/region/spenclib/abstracts.py | 70 ++++++++++-------------------- spopt/region/spenclib/scores.py | 6 +-- spopt/region/spenclib/utils.py | 13 +++--- spopt/region/util.py | 30 ++++++------- spopt/region/ward.py | 2 +- spopt/tests/test_p_dispersion.py | 2 +- 25 files changed, 211 insertions(+), 230 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 69d6fb5f..35ea3faf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -88,11 +88,23 @@ line-length = 88 line-length = 88 select = ["E", "F", "W", "I", "UP", "N", "B", "A", "C4", "SIM", "ARG"] target-version = "py311" -ignore = ["B006", "F401", "F403"] +ignore = [ + "B006", + "B008", + "B009", + "B010", + "C408", + "E731", + "F401", + "F403", + "N803", + "N806", + "N999" +] exclude = ["spopt/tests/*", "docs/*"] [tool.coverage.run] -source = ["spopt"] +source = ["./spopt"] [tool.coverage.report] exclude_lines = [ diff --git a/spopt/locate/__init__.py b/spopt/locate/__init__.py index 59c74bbd..d18e9654 100644 --- a/spopt/locate/__init__.py +++ b/spopt/locate/__init__.py @@ -1,6 +1,6 @@ from .base import BaseOutputMixin, CoveragePercentageMixin, MeanDistanceMixin -from .coverage import LSCP, MCLP, LSCPB +from .coverage import LSCP, LSCPB, MCLP +from .p_center import PCenter from .p_dispersion import PDispersion from .p_median import PMedian -from .p_center import PCenter from .util import simulated_geo_points diff --git a/spopt/locate/base.py 
b/spopt/locate/base.py index d944782d..efd69d85 100644 --- a/spopt/locate/base.py +++ b/spopt/locate/base.py @@ -1,11 +1,10 @@ from abc import abstractmethod - -from ..BaseClass import BaseSpOptExactSolver from typing import TypeVar import numpy as np import pulp +from ..BaseClass import BaseSpOptExactSolver # https://coin-or.github.io/pulp/technical/constants.html#pulp.constants.LpStatus STATUS_CODES = { @@ -496,7 +495,7 @@ def add_facility_capacity_constraint( assigned to facility :math:`j` must be less than or equal to the capacity at facility :math:`j`. - :math:`a_i Z_{ij} \leq C_j X_j` + :math:`a_i Z_{ij} \\leq C_j X_j` n1_1 * fac_var1 + n1_2 * fac_var1 + ... + nij * fac_varj >= dem_var[i] diff --git a/spopt/locate/coverage.py b/spopt/locate/coverage.py index 34f965fc..d68a8e67 100644 --- a/spopt/locate/coverage.py +++ b/spopt/locate/coverage.py @@ -1,18 +1,17 @@ -import numpy as np +import warnings +import numpy as np import pulp from geopandas import GeoDataFrame +from scipy.spatial.distance import cdist from .base import ( + BackupPercentageMixinMixin, BaseOutputMixin, CoveragePercentageMixin, - BackupPercentageMixinMixin, - LocateSolver, FacilityModelBuilder, + LocateSolver, ) -from scipy.spatial.distance import cdist - -import warnings class LSCP(LocateSolver, BaseOutputMixin): @@ -1365,9 +1364,8 @@ def facility_client_array(self) -> None: array_cli = [] if fac_vars[j].value() > 0: for i in range(self.aij.shape[0]): - if cli_vars[i].value() > 0: - if self.aij[i, j] > 0: - array_cli.append(i) + if cli_vars[i].value() > 0 and self.aij[i, j] > 0: + array_cli.append(i) self.fac2cli.append(array_cli) diff --git a/spopt/locate/p_center.py b/spopt/locate/p_center.py index 56790b54..f351b767 100644 --- a/spopt/locate/p_center.py +++ b/spopt/locate/p_center.py @@ -1,12 +1,11 @@ -import numpy as np +import warnings +import numpy as np import pulp from geopandas import GeoDataFrame - -from .base import BaseOutputMixin, LocateSolver, FacilityModelBuilder from scipy.spatial.distance import cdist -import warnings +from .base import BaseOutputMixin, FacilityModelBuilder, LocateSolver class PCenter(LocateSolver, BaseOutputMixin): diff --git a/spopt/locate/p_dispersion.py b/spopt/locate/p_dispersion.py index 47b45109..d1f3f180 100644 --- a/spopt/locate/p_dispersion.py +++ b/spopt/locate/p_dispersion.py @@ -1,12 +1,11 @@ -import numpy as np +import warnings +import numpy as np import pulp from geopandas import GeoDataFrame - -from .base import LocateSolver, FacilityModelBuilder from scipy.spatial.distance import cdist -import warnings +from .base import FacilityModelBuilder, LocateSolver class PDispersion(LocateSolver): @@ -304,7 +303,7 @@ def from_geodataframe( distances, p_facilities, predefined_facilities_arr, name ) - def solve(self, solver: pulp.LpSolver, results: bool = True): + def solve(self, solver: pulp.LpSolver): """ Solve the ``PDispersion`` model. @@ -313,9 +312,6 @@ def solve(self, solver: pulp.LpSolver, results: bool = True): solver : pulp.LpSolver A solver supported by ``pulp``. - results : bool (default True) - If ``True`` it will create metainfo (which facilities cover - which demand) and vice-versa, and the uncovered demand. 
Returns ------- diff --git a/spopt/locate/p_median.py b/spopt/locate/p_median.py index 2fcb5e60..7e956731 100644 --- a/spopt/locate/p_median.py +++ b/spopt/locate/p_median.py @@ -1,18 +1,17 @@ -import numpy as np +import warnings +from typing import Union +import numpy as np import pulp from geopandas import GeoDataFrame +from scipy.spatial.distance import cdist from .base import ( BaseOutputMixin, - LocateSolver, FacilityModelBuilder, + LocateSolver, MeanDistanceMixin, ) -from scipy.spatial.distance import cdist - -from typing import Union -import warnings class PMedian(LocateSolver, BaseOutputMixin, MeanDistanceMixin): @@ -83,7 +82,7 @@ def __init__( name: str, problem: pulp.LpProblem, aij: np.array, - weights_sum: Union[int, float], + weights_sum: int | float, ): self.aij = aij self.ai_sum = weights_sum diff --git a/spopt/locate/util.py b/spopt/locate/util.py index 13ad5850..dec44acd 100644 --- a/spopt/locate/util.py +++ b/spopt/locate/util.py @@ -1,11 +1,12 @@ +from typing import Union + import geopandas import numpy -from shapely.geometry import Point, Polygon, MultiPolygon -from typing import Union +from shapely.geometry import MultiPolygon, Point, Polygon def simulated_geo_points( - in_data: Union[geopandas.GeoDataFrame, geopandas.GeoSeries, Polygon, MultiPolygon], + in_data: geopandas.GeoDataFrame | geopandas.GeoSeries | Polygon | MultiPolygon, needed: int = 1, seed: int = 0, ) -> geopandas.GeoDataFrame: @@ -67,13 +68,11 @@ def simulated_geo_points( raise ValueError(msg) # create single areal entity and isolate bounding box - if isinstance(in_data, geopandas.GeoDataFrame) or isinstance( - in_data, geopandas.GeoSeries - ): + if isinstance(in_data, (geopandas.GeoDataFrame, geopandas.GeoSeries)): geom = in_data.geometry.unary_union xmin, ymin, xmax, ymax = tuple(in_data.total_bounds) crs = in_data.crs - elif isinstance(in_data, Polygon) or isinstance(in_data, MultiPolygon): + elif isinstance(in_data, (Polygon, MultiPolygon)): geom = in_data xmin, ymin, xmax, ymax = in_data.bounds crs = None diff --git a/spopt/region/__init__.py b/spopt/region/__init__.py index 91ad6f13..bbc58b19 100644 --- a/spopt/region/__init__.py +++ b/spopt/region/__init__.py @@ -1,8 +1,8 @@ +from .azp import AZP +from .base import w_to_g from .maxp import MaxPHeuristic +from .random_region import RandomRegion, RandomRegions from .region_k_means import RegionKMeansHeuristic -from .base import w_to_g from .skater import Skater -from .ward import WardSpatial from .spenc import Spenc -from .azp import AZP -from .random_region import RandomRegion, RandomRegions +from .ward import WardSpatial diff --git a/spopt/region/azp.py b/spopt/region/azp.py index e9af96d9..04acba9a 100755 --- a/spopt/region/azp.py +++ b/spopt/region/azp.py @@ -3,41 +3,42 @@ Environment and Planning A, 27(3):425-446. 
""" -from ..BaseClass import BaseSpOptHeuristicSolver import abc -from collections import deque import math import random +from collections import deque -import numpy as np import networkx as nx +import numpy as np -from spopt.region.csgraph_utils import sub_adj_matrix, neighbors, is_connected -from spopt.region.objective_function import ObjectiveFunctionPairwise from spopt.region.azp_util import ( - AllowMoveStrategy, AllowMoveAZP, AllowMoveAZPSimulatedAnnealing, + AllowMoveStrategy, ) +from spopt.region.csgraph_utils import is_connected, neighbors, sub_adj_matrix +from spopt.region.objective_function import ObjectiveFunctionPairwise from spopt.region.util import ( + Move, array_from_df_col, array_from_dict_values, + array_from_graph_or_dict, assert_feasible, boolean_assert_feasible, copy_func, count, generate_initial_sol, make_move, - Move, pop_randomly_from, random_element_from, + scipy_sparse_matrix_from_dict, scipy_sparse_matrix_from_w, separate_components, w_from_gdf, - array_from_graph_or_dict, - scipy_sparse_matrix_from_dict, ) +from ..BaseClass import BaseSpOptHeuristicSolver + class AZP(BaseSpOptHeuristicSolver): """AZP involves class offering the implementation of @@ -147,8 +148,7 @@ def solve(self): data = self.gdf X = data[self.attrs_name].values - ########## - model = AZP_orig(self.allow_move_strategy, self.random_state) + model = AZPOrig(self.allow_move_strategy, self.random_state) model.fit_from_w( self.w, X, @@ -159,7 +159,7 @@ def solve(self): self.labels_ = model.labels_ -class AZP_orig: +class AZPOrig: """ Class offering the implementation of the AZP algorithm. @@ -456,7 +456,7 @@ def fit_from_dict( adj, attr_arr, n_regions, initial_labels, objective_func=objective_func ) - def _azp_connected_component(self, adj, initial_clustering, attr): + def _azp_connected_component(self, adj, initial_clustering, attr): # noqa ARG002 """ Implementation of the AZP algorithm for a spatially connected set of areas (i.e. for every area there is a path to every other area). 
@@ -527,10 +527,9 @@ def _azp_connected_component(self, adj, initial_clustering, attr): sub_adj = sub_adj_matrix( adj, np.where(labels == neigh_region)[0], wo_nodes=neigh ) - if is_connected(sub_adj): - # if area is alone in its region, it must stay - if count(labels, neigh_region) > 1: - candidates.append(neigh) + # if area is alone in its region, it must stay + if is_connected(sub_adj) and count(labels, neigh_region) > 1: + candidates.append(neigh) # step 5: randomly select zones from this list until either # there is a local improvement in the current value of the # objective function or a move that is equivalently as good @@ -558,14 +557,14 @@ def _azp_connected_component(self, adj, initial_clustering, attr): ) donor_region_areas = set(np.where(labels == donor)[0]) - not_donor_neighs_anymore = set( + not_donor_neighs_anymore = { area for area in neighs_of_cand if not any( a in donor_region_areas for a in neighbors(adj, area) ) - ) + } region_neighbors[donor].difference_update( not_donor_neighs_anymore ) @@ -862,9 +861,8 @@ def fit_from_scipy_sparse_matrix( ) initial_labels = self.azp.labels_ - if old_sol is not None: - if (old_sol == initial_labels).all(): - break + if old_sol is not None and (old_sol == initial_labels).all(): + break # added termination condition (not in Openshaw & Rao (1995)) if ( self.visited.count(tuple(initial_labels)) @@ -1207,7 +1205,7 @@ def _azp_connected_component(self, adj, initial_labels, attr): obj_val_start = float("inf") # step 12: Repeat steps 3-11 until either no further improvements are # made or a maximum number of iterations are exceeded. - for it in range(self.maxit): + for _it in range(self.maxit): obj_val_end = self.objective_func(labels, attr) if not obj_val_end < obj_val_start: break # step 12 diff --git a/spopt/region/azp_util.py b/spopt/region/azp_util.py index 150ff562..563d852d 100755 --- a/spopt/region/azp_util.py +++ b/spopt/region/azp_util.py @@ -7,7 +7,9 @@ class AllowMoveStrategy(abc.ABC): - def start_new_component(self, initial_labels, attr, objective_func, comp_idx): + def start_new_component( + self, initial_labels, attr, objective_func, comp_idx # noqa ARG002 + ): """ This method should be called whenever a new connected component is clustered. 
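
Most of the remaining hunks in this patch repeat a handful of mechanical
rewrites driven by the rule groups selected in pyproject.toml above (C4,
SIM, and UP among them). A toy, self-contained summary of the recurring
patterns; the data and function names below are made up for illustration,
and the union syntax requires Python >= 3.10:

    keep_ids = [4, 7, 9]

    # C4: dict/set comprehensions replace dict()/set() wrapped around generators
    old_new = {o: n for n, o in enumerate(keep_ids)}  # was dict([(o, n) for ...])
    evens = {i for i in keep_ids if i % 2 == 0}       # was set(i for i in ...)

    # SIM: "True if cond else False" becomes the condition itself,
    # and nested if blocks are merged with "and"
    def is_single_component(n_components: int) -> bool:
        return n_components == 1  # was: True if n_components == 1 else False

    # UP: PEP 604 unions replace typing.Union in annotations
    def as_float(weights_sum: int | float) -> float:  # was Union[int, float]
        return float(weights_sum)

    print(old_new, evens, is_single_component(1), as_float(3))
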
diff --git a/spopt/region/base.py b/spopt/region/base.py index 504855a1..842324ed 100644 --- a/spopt/region/base.py +++ b/spopt/region/base.py @@ -1,13 +1,14 @@ """Base classes and functions for spopt/region""" -import libpysal -import numpy import copy + +import libpysal import networkx +import numpy from scipy.spatial import KDTree -class RegionMixin(object): +class RegionMixin: """Mixin class for all region solvers.""" _solver_type = "regionalizer" @@ -373,7 +374,7 @@ def modify_components(gdf, w, threshold_var, threshold, policy="single"): gdf = gdf.iloc[keep_ids] cw = libpysal.weights.w_subset(w, keep_ids) new_neigh = {} - old_new = dict([(o, n) for n, o in enumerate(keep_ids)]) + old_new = {o: n for n, o in enumerate(keep_ids)} for old in keep_ids: new_key = old_new[old] new_neigh[new_key] = [old_new[j] for j in cw.neighbors[old]] @@ -426,6 +427,7 @@ def form_single_component(gdf, w, linkage="single"): zip( gdf.iloc[wcl == lcl].geometry.centroid.x, gdf.iloc[wcl == lcl].geometry.centroid.y, + strict=True, ) ) ) @@ -445,6 +447,7 @@ def form_single_component(gdf, w, linkage="single"): zip( gdf.iloc[wcl == cl].geometry.centroid.x, gdf.iloc[wcl == cl].geometry.centroid.y, + strict=True, ) ) dd, jj = tree.query(query_pnts, k=1) @@ -459,7 +462,7 @@ def form_single_component(gdf, w, linkage="single"): i = clas[min_idx] joins.append((i, j)) else: - pairs = zip(clas, jj) + pairs = zip(clas, jj, strict=True) joins.extend(list(pairs)) neighbors = copy.deepcopy(w.neighbors) diff --git a/spopt/region/components.py b/spopt/region/components.py index 21b2cb50..184167c4 100644 --- a/spopt/region/components.py +++ b/spopt/region/components.py @@ -32,7 +32,7 @@ def is_component(w, ids): """ components = 0 - marks = dict([(node, 0) for node in ids]) + marks = {node: 0 for node in ids} q = [] for node in ids: if marks[node] == 0: @@ -103,7 +103,7 @@ def check_contiguity(w, neighbors, leaver): return is_component(w, ids) -class Graph(object): +class Graph: def __init__(self, undirected=True): self.nodes = set() self.edges = {} diff --git a/spopt/region/csgraph_utils.py b/spopt/region/csgraph_utils.py index 9031fe03..beb2aef1 100755 --- a/spopt/region/csgraph_utils.py +++ b/spopt/region/csgraph_utils.py @@ -4,9 +4,9 @@ [compressed sparse graph routines]( https://docs.scipy.org/doc/scipy/reference/sparse.csgraph.html). 
""" +import numpy as np from scipy.sparse import csgraph as csg from scipy.sparse import csr_matrix -import numpy as np def is_connected(adj): @@ -43,7 +43,7 @@ def is_connected(adj): n_connected_components = csg.connected_components( adj, directed=False, return_labels=False ) - return True if n_connected_components == 1 else False + return n_connected_components == 1 def neighbors(adj, area): diff --git a/spopt/region/maxp.py b/spopt/region/maxp.py index 7a23bb28..a6efc9d8 100644 --- a/spopt/region/maxp.py +++ b/spopt/region/maxp.py @@ -8,12 +8,13 @@ __author__ = ["Ran Wei", "Serge Rey", "Elijah Knaap"] __email__ = "sjsrey@gmail.com" -from ..BaseClass import BaseSpOptHeuristicSolver +from copy import deepcopy -from scipy.spatial.distance import pdist, squareform -from scipy.sparse.csgraph import connected_components import numpy as np -from copy import deepcopy +from scipy.sparse.csgraph import connected_components +from scipy.spatial.distance import pdist, squareform + +from ..BaseClass import BaseSpOptHeuristicSolver from .base import modify_components ITERCONSTRUCT = 999 @@ -122,8 +123,8 @@ def maxp( label, regionList, regionSpatialAttr = rl if verbose: print(irl) - for saiter in range(max_iterations_sa): - finalLabel, finalRegionList, finalRegionSpatialAttr = performSA( + for _saiter in range(max_iterations_sa): + finalLabel, finalRegionList, finalRegionSpatialAttr = perform_sa( label, regionList, regionSpatialAttr, @@ -135,7 +136,7 @@ def maxp( tabuLength, max_no_move, ) - totalWithinRegionDistance = calculateWithinRegionDistance( + totalWithinRegionDistance = calculate_within_region_distance( finalRegionList, distance_matrix ) if verbose: @@ -153,7 +154,7 @@ def maxp( def construction_phase( arr, - attr, + attr, # noqa ARG001 threshold_array, distance_matrix, weight, @@ -216,7 +217,7 @@ def construction_phase( for arr_index in range(0, len(threshold_array)): P = arr[arr_index] - if not (labels[P] == 0): + if labels[P] != 0: continue NeighborPolys = deepcopy(weight.neighbors[P]) @@ -225,7 +226,7 @@ def construction_phase( labels[P] = -1 else: C += 1 - labeledID, spatialAttrTotal = growClusterForPoly( + labeledID, spatialAttrTotal = grow_cluster_for_poly( labels, threshold_array, P, NeighborPolys, C, weight, spatialThre ) @@ -245,7 +246,7 @@ def construction_phase( continue else: max_p = num_regions - maxp_labels, maxp_regionList, maxp_regionSpatialAttr = assignEnclave( + maxp_labels, maxp_regionList, maxp_regionSpatialAttr = assign_enclave( enclave, labels, regionList, @@ -267,7 +268,7 @@ def construction_phase( return real_values -def growClusterForPoly( +def grow_cluster_for_poly( labels, threshold_array, P, NeighborPolys, C, weight, spatialThre ): """Grow one region until threshold constraint is satisfied. @@ -329,7 +330,7 @@ def growClusterForPoly( return cluster_info -def assignEnclave( +def assign_enclave( enclave, labels, regionList, @@ -409,7 +410,7 @@ def assignEnclave( return region_info -def calculateWithinRegionDistance(regionList, distance_matrix): +def calculate_within_region_distance(regionList, distance_matrix): """Calculate total wthin-region distance/dissimilarity. 
Parameters @@ -430,7 +431,7 @@ def calculateWithinRegionDistance(regionList, distance_matrix): """ totalWithinRegionDistance = 0 - for k, v in regionList.items(): + for _k, v in regionList.items(): nv = np.array(v) regionDistance = distance_matrix[nv, :][:, nv].sum() / 2 totalWithinRegionDistance += regionDistance @@ -438,13 +439,13 @@ def calculateWithinRegionDistance(regionList, distance_matrix): return totalWithinRegionDistance -def pickMoveArea( - labels, +def pick_move_area( + labels, # noqa ARG001 regionLists, regionSpatialAttrs, threshold_array, weight, - distance_matrix, + distance_matrix, # noqa ARG001 threshold, ): """Pick a spatial unit that can move from one region to another. @@ -499,8 +500,14 @@ def pickMoveArea( return potentialAreas -def checkMove( - poa, labels, regionLists, threshold_array, weight, distance_matrix, threshold +def check_move( + poa, + labels, + regionLists, + threshold_array, # noqa ARG001 + weight, + distance_matrix, + threshold, # noqa ARG001 ): """Calculate the dissimilarity increase/decrease from one potential move. @@ -558,7 +565,7 @@ def checkMove( return move_info -def performSA( +def perform_sa( initLabels, initRegionList, initRegionSpatialAttr, @@ -627,7 +634,7 @@ def performSA( while ni_move_ct <= max_no_move: if len(potentialAreas) == 0: - potentialAreas = pickMoveArea( + potentialAreas = pick_move_area( labels, regionLists, regionSpatialAttrs, @@ -640,7 +647,7 @@ def performSA( if len(potentialAreas) == 0: break poa = potentialAreas[np.random.randint(len(potentialAreas))] - lostDistance, minAddedDistance, potentialMove = checkMove( + lostDistance, minAddedDistance, potentialMove = check_move( poa, labels, regionLists, diff --git a/spopt/region/objective_function.py b/spopt/region/objective_function.py index e747b99c..c07fded2 100755 --- a/spopt/region/objective_function.py +++ b/spopt/region/objective_function.py @@ -1,6 +1,6 @@ +import itertools from abc import ABC, abstractmethod -import itertools import numpy as np from spopt.region.util import get_metric_function diff --git a/spopt/region/random_region.py b/spopt/region/random_region.py index c61c26aa..747bccb5 100644 --- a/spopt/region/random_region.py +++ b/spopt/region/random_region.py @@ -142,7 +142,7 @@ def __init__( permutations=99, ): solutions = [] - for i in range(permutations): + for _i in range(permutations): solutions.append( RandomRegion( area_ids, @@ -287,27 +287,24 @@ def __init__( self.feasible = True # tests for input argument consistency - if cardinality: - if self.n != sum(cardinality): - self.feasible = False - raise ValueError( - f"Number of areas ({self.n}) does not match " - f"`cardinality` ({sum(cardinality)})." - ) - if contiguity: - if area_ids != contiguity.id_order: - self.feasible = False - raise ValueError( - "Order of `area_ids` must match order in `contiguity`. Inspect " - "the `area_ids` and `contiguity.id_order` input parameters." - ) - if num_regions and cardinality: - if num_regions != len(cardinality): - self.feasible = False - raise ValueError( - f"Number of regions ({num_regions}) does not match " - f"`cardinality` ({len(cardinality)})." - ) + if cardinality and self.n != sum(cardinality): + self.feasible = False + raise ValueError( + f"Number of areas ({self.n}) does not match " + f"`cardinality` ({sum(cardinality)})." + ) + if contiguity and area_ids != contiguity.id_order: + self.feasible = False + raise ValueError( + "Order of `area_ids` must match order in `contiguity`. Inspect " + "the `area_ids` and `contiguity.id_order` input parameters." 
+ ) + if num_regions and cardinality and num_regions != len(cardinality): + self.feasible = False + raise ValueError( + f"Number of regions ({num_regions}) does not match " + f"`cardinality` ({len(cardinality)})." + ) # dispatches the appropriate algorithm if num_regions and cardinality and contiguity: @@ -357,7 +354,7 @@ def get_num_regions(self): return np.random.randint(2, self.n) def get_region_breaks(self, num_regions): - region_breaks = set([]) + region_breaks = set() while len(region_breaks) < num_regions - 1: region_breaks.add(np.random.randint(1, self.n - 1)) region_breaks = list(region_breaks) @@ -383,7 +380,7 @@ def cards2breaks(self, cards): region_breaks.pop() return region_breaks - def build_noncontig_regions(self, num_regions, region_breaks): + def build_noncontig_regions(self, num_regions, region_breaks): # noqa ARG002 start = 0 for i in region_breaks: self.regions.append(self.ids[start:i]) @@ -411,7 +408,7 @@ def grow_compact(self, w, test_card, region, candidates, potential): ) return region, candidates, potential - def grow_free(self, w, test_card, region, candidates, potential): + def grow_free(self, w, test_card, region, candidates, potential): # noqa ARG002 # increment potential areas after each new area is # added to the region (faster than the grow_compact) pot_index = np.random.randint(0, len(potential)) @@ -431,12 +428,9 @@ def grow_free(self, w, test_card, region, candidates, potential): def build_contig_regions( self, num_regions, cardinality, w, maxiter, compact, max_swaps ): - if compact: - grow_region = self.grow_compact - else: - grow_region = self.grow_free - iter = 0 - while iter < maxiter: + grow_region = self.grow_compact if compact else self.grow_free + _iter = 0 + while _iter < maxiter: # regionalization setup regions = [] size_pre = 0 @@ -530,9 +524,9 @@ def build_contig_regions( # regionalization failed self.ids = list(np.random.permutation(self.ids)) regions = [] - iter += 1 + _iter += 1 else: # regionalization successful self.feasible = True - iter = maxiter + _iter = maxiter self.regions = regions diff --git a/spopt/region/region_k_means.py b/spopt/region/region_k_means.py index 87cf19ce..cd80abab 100644 --- a/spopt/region/region_k_means.py +++ b/spopt/region/region_k_means.py @@ -10,16 +10,18 @@ from collections import defaultdict + import numpy + from ..BaseClass import BaseSpOptHeuristicSolver from .base import ( - w_to_g, - move_ok, - ok_moves, - region_neighbors, _centroid, _closest, _seeds, + move_ok, + ok_moves, + region_neighbors, + w_to_g, ) diff --git a/spopt/region/skater.py b/spopt/region/skater.py index d1d79cb3..346211b3 100755 --- a/spopt/region/skater.py +++ b/spopt/region/skater.py @@ -1,18 +1,19 @@ -from ..BaseClass import BaseSpOptHeuristicSolver - -from sklearn.metrics import pairwise as skm -from scipy.sparse import csgraph as cg -from scipy.optimize import OptimizeWarning -from collections import namedtuple -import time -import numpy as np import copy +import time import warnings +from collections import namedtuple + +import numpy as np +from scipy.optimize import OptimizeWarning +from scipy.sparse import csgraph as cg +from sklearn.metrics import pairwise as skm + +from ..BaseClass import BaseSpOptHeuristicSolver deletion = namedtuple("deletion", ("in_node", "out_node", "score")) -class SpanningForest(object): +class SpanningForest: def __init__( self, dissimilarity=skm.manhattan_distances, @@ -263,7 +264,7 @@ def score(self, data, labels=None, quorum=-np.inf): raise ValueError( "Labels not provided and ``MSF_Prune object`` 
" "has not been fit to data yet." - ) + ) from None assert data.shape[0] == len(labels), ( f"Length of label array ({labels.shape[0]}) " @@ -343,12 +344,12 @@ def find_cut( from tqdm.auto import tqdm except ImportError: - def tqdm(noop, desc=""): + def tqdm(noop, desc=""): # noqa ARG001 return noop else: - def tqdm(noop, desc=""): + def tqdm(noop, desc=""): # noqa ARG001 return noop zero_in = (labels is not None) and (target_label is not None) @@ -359,9 +360,8 @@ def tqdm(noop, desc=""): for in_node, out_node in tqdm( np.vstack(MSF.nonzero()).T, desc="finding cut..." ): # iterate over MSF edges - if zero_in: - if labels[in_node] != target_label: - continue + if zero_in and labels[in_node] != target_label: + continue local_MSF = copy.deepcopy(MSF) diff --git a/spopt/region/spenclib/abstracts.py b/spopt/region/spenclib/abstracts.py index a68a0898..ecb792ee 100644 --- a/spopt/region/spenclib/abstracts.py +++ b/spopt/region/spenclib/abstracts.py @@ -1,17 +1,19 @@ -from sklearn import cluster as clust +import numpy as np +import scipy.sparse as spar import sklearn.metrics as skm import sklearn.metrics.pairwise as pw -from sklearn.utils.validation import check_array -from .utils import check_weights -from sklearn.neighbors import kneighbors_graph -from sklearn.utils.extmath import _deterministic_vector_sign_flip -from sklearn.utils import check_random_state +from scipy.sparse import csgraph as cg +from scipy.sparse import linalg as la +from sklearn import cluster as clust from sklearn.cluster._spectral import discretize as _discretize +from sklearn.neighbors import kneighbors_graph from sklearn.preprocessing import LabelEncoder -import numpy as np +from sklearn.utils import check_random_state +from sklearn.utils.extmath import _deterministic_vector_sign_flip +from sklearn.utils.validation import check_array + from .scores import boundary_fraction -import scipy.sparse as spar -from scipy.sparse import csgraph as cg, linalg as la +from .utils import check_weights class SPENC(clust.SpectralClustering): @@ -186,14 +188,12 @@ def fit( self, X, W=None, - y=None, shift_invert=True, breakme=False, check_W=True, grid_resolution=100, floor=0, floor_weights=None, - cut_method="gridsearch", ): """Creates an affinity matrix for X using the selected affinity, applies W to the affinity elementwise, and then applies spectral clustering @@ -207,8 +207,6 @@ def fit( W : sparse or dense array, default None matrix expressing the pairwise spatial relationships between N observations. - y : sparse or dense array, default None - ignored, for scikit-learn class inheritance/regularity purposes. shift_invert : bool, default True boolean governing whether or not to use shift-invert trick to finding sparse eigenvectors @@ -232,19 +230,6 @@ def fit( floor_weights : np.ndarray of shape (n,), default np.ones((n,)) array containing weights for each observation used to determine the region floor. - cut_method : str, default 'gridsearch' - option governing what method to use to partition regions - 1. "gridsearch" (default): the hierarchical grid search - suggested by Shi & Malik (2000); search the second - eigenvector for the "best" partition in terms of cut weight. - 2. "zero": cut the eigenvector at zero. Usually a - passable solution, since the second eigenvector is usually - centered around zero. - 3. "median": cut the eigenvector through its median. - This means the regions will always be divided into two - halves with equal numbers of elemental units. - "gridsearch" may be slow when grid_resolution is large. 
- "zero" is the best method for large data. Notes ----- @@ -445,14 +430,13 @@ def _spectral_bipartition( ), "Indexing Error in cutting!" if ((left_cut * floor_weights).sum() > floor) & ( (right_cut * floor_weights).sum() > floor + ) and (tuple(left_cut) not in accepted_cuts) & ( + tuple(right_cut) not in accepted_cuts ): - if (tuple(left_cut) not in accepted_cuts) & ( - tuple(right_cut) not in accepted_cuts - ): - cuts.append(left_cut) - accepted_cuts.append(tuple(left_cut)) - cuts.append(right_cut) - accepted_cuts.append(tuple(right_cut)) + cuts.append(left_cut) + accepted_cuts.append(tuple(left_cut)) + cuts.append(right_cut) + accepted_cuts.append(tuple(right_cut)) discovered += 1 try: this_cut = cuts.pop(0) @@ -472,7 +456,6 @@ def _make_hierarchical_cut( affinity_matrix, grid_resolution, cut_method="median", - floor=0, ): """Compute a single hierarchical cut using one of the methods described in Shi and Malik (2000). @@ -560,7 +543,7 @@ def score( spatial_score = spatial_score(W, labels, X=X, **spatial_kw) return delta * attribute_score + (1 - delta) * spatial_score - def _sample_gen(self, W, n_samples=1, affinity="rbf", distribution=None, **fit_kw): + def _sample_gen(self, W, n_samples=1, distribution=None, **fit_kw): """ NOTE: this is the lazy generator version of sample Compute random clusters using random eigenvector decomposition. @@ -578,9 +561,6 @@ def _sample_gen(self, W, n_samples=1, affinity="rbf", distribution=None, **fit_k undefined if not. n_samples : int, default 1 integer describing how many samples to construct - affinity : string or callable, default is 'rbf' - passed down to the underlying SPENC class when spectral - spatial clusters are found. distribution : callable default is numpy.random.normal(0,1, size=(N,1)) function when called with no arguments that draws the random weights used to @@ -633,12 +613,11 @@ class for further customization. 
""" # noqa E501 result = np.vstack( - [ - labels - for labels in self._sample_gen( + list( + self._sample_gen( W, n_samples=n_samples, distribution=distribution, **fit_kw ) - ] + ) ) if n_samples == 1: result = result.flatten() @@ -690,10 +669,5 @@ def sample(self, n_samples=1, distribution=None): """ # noqa E501 return np.vstack( - [ - labels - for labels in self._sample_gen( - n_samples=n_samples, distribution=distribution - ) - ] + list(self._sample_gen(n_samples=n_samples, distribution=distribution)) ) diff --git a/spopt/region/spenclib/scores.py b/spopt/region/spenclib/scores.py index bca794cd..3b41072c 100644 --- a/spopt/region/spenclib/scores.py +++ b/spopt/region/spenclib/scores.py @@ -1,16 +1,16 @@ import numpy as np -def boundary_fraction(W, labels, X=None): +def boundary_fraction(W, labels): """""" boundary = 0 - for row, own_label in zip(W, labels): + for row, own_label in zip(W, labels, strict=True): neighbor_labels = labels[row.nonzero()[-1]] boundary += (neighbor_labels != own_label).any().astype(int) return boundary / W.shape[0] -def boundary_score(W, labels, X=None): +def boundary_score(W, labels): """ Returns a version of boundary_fraction unbounded on the negative end using the log of the fraction: diff --git a/spopt/region/spenclib/utils.py b/spopt/region/spenclib/utils.py index b6f284ba..c02c822f 100644 --- a/spopt/region/spenclib/utils.py +++ b/spopt/region/spenclib/utils.py @@ -1,10 +1,11 @@ -import scipy.sparse.csgraph as csg -import scipy.sparse as sp -from warnings import warn as Warn +from warnings import warn + import numpy as np +import scipy.sparse as sp +import scipy.sparse.csgraph as csg -def check_weights(W, X=None, transform=None): +def check_weights(W, X=None): if X is not None: assert ( W.shape[0] == X.shape[0] @@ -13,7 +14,7 @@ def check_weights(W, X=None, transform=None): graph.eliminate_zeros() components, labels = csg.connected_components(graph) if components > 1: - Warn( + warn( "Spatial affinity matrix is disconnected, and has {} subcomponents." "This will certainly affect the solution output." ) @@ -24,8 +25,8 @@ def lattice(x, y): """ Construct a lattice of unit squares of dimension (x,y) """ - from shapely.geometry import Polygon import geopandas as gpd + from shapely.geometry import Polygon x = np.arange(x) * 1.0 y = np.arange(y) * 1.0 diff --git a/spopt/region/util.py b/spopt/region/util.py index db4f50c7..de26faff 100755 --- a/spopt/region/util.py +++ b/spopt/region/util.py @@ -319,7 +319,7 @@ def array_from_region_list(region_list): """ n_areas = sum(len(region) for region in region_list) - labels = np.zeros((n_areas)) + labels = np.zeros(n_areas) for region_idx, region in enumerate(region_list): for area in region: labels[area] = region_idx @@ -445,7 +445,7 @@ def dataframe_to_dict(df, cols): True """ - return dict(zip(df.index, np.array(df[cols]))) + return dict(zip(df.index, np.array(df[cols]), strict=True)) def find_sublist_containing(el, lst, index=False): @@ -541,12 +541,12 @@ def get_metric_function(metric=None): return distance_metrics()[metric] except KeyError: accetpable_names = tuple( - name for name in distance_metrics().keys() if name != "precomputed" + name for name in distance_metrics() if name != "precomputed" ) raise ValueError( f"'{metric}' is not a known metric. Please use one " f"of the following metrics: {accetpable_names}." 
- ) + ) from None elif callable(metric): return metric else: @@ -556,12 +556,12 @@ def get_metric_function(metric=None): ) -class MissingMetric(RuntimeError): +class MissingMetricError(RuntimeError): """Raised when a distance metric is required but was not set.""" def raise_distance_metric_not_set(x, y): - raise MissingMetric("distance metric not set!") + raise MissingMetricError(f"distance metric not set! {x, y}") def make_move(moving_area, new_label, labels): @@ -821,11 +821,10 @@ def assert_feasible(solution, adj, n_regions=None): also if the number of regions is not equal to the ``n_regions`` argument. """ - if n_regions is not None: - if len(set(solution)) != n_regions: - raise ValueError( - f"The number of regions is {len(solution)} but should be {n_regions}." - ) + if n_regions is not None and len(set(solution)) != n_regions: + raise ValueError( + f"The number of regions is {len(solution)} but should be {n_regions}." + ) for region_label in set(solution): aux = sub_adj_matrix(adj, np.where(solution == region_label)[0]) @@ -839,11 +838,10 @@ def boolean_assert_feasible(solution, adj, n_regions=None): """Return boolean version of assert_feasible.""" resp = [] - if n_regions is not None: - if len(set(solution)) != n_regions: - raise ValueError( - f"The number of regions is {len(solution)} but should be {n_regions}." - ) + if n_regions is not None and len(set(solution)) != n_regions: + raise ValueError( + f"The number of regions is {len(solution)} but should be {n_regions}." + ) for region_label in set(solution): aux = sub_adj_matrix(adj, np.where(solution == region_label)[0]) diff --git a/spopt/region/ward.py b/spopt/region/ward.py index bd6be685..9b3fe457 100644 --- a/spopt/region/ward.py +++ b/spopt/region/ward.py @@ -83,7 +83,7 @@ def solve(self): n_clusters=self.n_clusters, connectivity=self.w.sparse, linkage="ward", - **self.clustering_kwds + **self.clustering_kwds, ) model.fit(X) self.labels_ = model.labels_ diff --git a/spopt/tests/test_p_dispersion.py b/spopt/tests/test_p_dispersion.py index 1cd34b9c..e5c53867 100644 --- a/spopt/tests/test_p_dispersion.py +++ b/spopt/tests/test_p_dispersion.py @@ -59,7 +59,7 @@ def test_p_dispersion_from_cost_matrix(self): def test_p_dispersion_from_cost_matrix_no_results(self): pdispersion = PDispersion.from_cost_matrix(self.cost_matrix, 2) - result = pdispersion.solve(pulp.PULP_CBC_CMD(msg=False), results=False) + result = pdispersion.solve(pulp.PULP_CBC_CMD(msg=False)) assert isinstance(result, PDispersion) with pytest.raises(AttributeError): From 577688a1510addd0592ed46b28fc1ca418a4da28 Mon Sep 17 00:00:00 2001 From: James Gaboardi Date: Wed, 1 Mar 2023 12:21:00 -0500 Subject: [PATCH 12/16] ruff based on py39 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 35ea3faf..7a412bb8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -87,7 +87,7 @@ line-length = 88 [tool.ruff] line-length = 88 select = ["E", "F", "W", "I", "UP", "N", "B", "A", "C4", "SIM", "ARG"] -target-version = "py311" +target-version = "py39" ignore = [ "B006", "B008", From 961bf0d04a58e229c93f97bab68ca344794ddd72 Mon Sep 17 00:00:00 2001 From: James Gaboardi Date: Wed, 1 Mar 2023 12:30:47 -0500 Subject: [PATCH 13/16] ruff based on py38 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 7a412bb8..f9026bad 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -87,7 +87,7 @@ line-length = 88 [tool.ruff] line-length = 88 select = ["E", 
"F", "W", "I", "UP", "N", "B", "A", "C4", "SIM", "ARG"] -target-version = "py39" +target-version = "py38" ignore = [ "B006", "B008", From 20051f566f6b8a8d98fb8eb706593a3866dec430 Mon Sep 17 00:00:00 2001 From: James Gaboardi Date: Wed, 1 Mar 2023 19:21:56 -0500 Subject: [PATCH 14/16] resolve typing issues <3.9 [1] --- spopt/locate/p_median.py | 2 +- spopt/locate/util.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/spopt/locate/p_median.py b/spopt/locate/p_median.py index 7e956731..aab3ad7b 100644 --- a/spopt/locate/p_median.py +++ b/spopt/locate/p_median.py @@ -82,7 +82,7 @@ def __init__( name: str, problem: pulp.LpProblem, aij: np.array, - weights_sum: int | float, + weights_sum: Union[int, float], ): self.aij = aij self.ai_sum = weights_sum diff --git a/spopt/locate/util.py b/spopt/locate/util.py index dec44acd..a532fdef 100644 --- a/spopt/locate/util.py +++ b/spopt/locate/util.py @@ -6,7 +6,7 @@ def simulated_geo_points( - in_data: geopandas.GeoDataFrame | geopandas.GeoSeries | Polygon | MultiPolygon, + in_data: Union[geopandas.GeoDataFrame, geopandas.GeoSeries, Polygon, MultiPolygon], needed: int = 1, seed: int = 0, ) -> geopandas.GeoDataFrame: From e1b80db239b426751f296cc342266251869e5e74 Mon Sep 17 00:00:00 2001 From: James Gaboardi Date: Wed, 1 Mar 2023 20:18:52 -0500 Subject: [PATCH 15/16] resolve typing issues <3.9 [2] --- pyproject.toml | 3 ++- spopt/region/base.py | 4 +--- spopt/region/spenclib/scores.py | 2 +- spopt/region/util.py | 2 +- 4 files changed, 5 insertions(+), 6 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index f9026bad..c5032eff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -99,7 +99,8 @@ ignore = [ "F403", "N803", "N806", - "N999" + "N999", + "UP007" ] exclude = ["spopt/tests/*", "docs/*"] diff --git a/spopt/region/base.py b/spopt/region/base.py index 842324ed..e1e2b141 100644 --- a/spopt/region/base.py +++ b/spopt/region/base.py @@ -427,7 +427,6 @@ def form_single_component(gdf, w, linkage="single"): zip( gdf.iloc[wcl == lcl].geometry.centroid.x, gdf.iloc[wcl == lcl].geometry.centroid.y, - strict=True, ) ) ) @@ -447,7 +446,6 @@ def form_single_component(gdf, w, linkage="single"): zip( gdf.iloc[wcl == cl].geometry.centroid.x, gdf.iloc[wcl == cl].geometry.centroid.y, - strict=True, ) ) dd, jj = tree.query(query_pnts, k=1) @@ -462,7 +460,7 @@ def form_single_component(gdf, w, linkage="single"): i = clas[min_idx] joins.append((i, j)) else: - pairs = zip(clas, jj, strict=True) + pairs = zip(clas, jj) joins.extend(list(pairs)) neighbors = copy.deepcopy(w.neighbors) diff --git a/spopt/region/spenclib/scores.py b/spopt/region/spenclib/scores.py index 3b41072c..a9896531 100644 --- a/spopt/region/spenclib/scores.py +++ b/spopt/region/spenclib/scores.py @@ -4,7 +4,7 @@ def boundary_fraction(W, labels): """""" boundary = 0 - for row, own_label in zip(W, labels, strict=True): + for row, own_label in zip(W, labels): neighbor_labels = labels[row.nonzero()[-1]] boundary += (neighbor_labels != own_label).any().astype(int) return boundary / W.shape[0] diff --git a/spopt/region/util.py b/spopt/region/util.py index de26faff..a0987517 100755 --- a/spopt/region/util.py +++ b/spopt/region/util.py @@ -445,7 +445,7 @@ def dataframe_to_dict(df, cols): True """ - return dict(zip(df.index, np.array(df[cols]), strict=True)) + return dict(zip(df.index, np.array(df[cols]))) def find_sublist_containing(el, lst, index=False): From af961f1e003c2fad038efdb566e872bea669a005 Mon Sep 17 00:00:00 2001 From: James Gaboardi Date: Thu, 2 Mar 2023 09:08:14 -0500 
Subject: [PATCH 16/16] address review comments --- .ci/310.yaml | 4 ++++ .ci/311-DEV.yaml | 6 +++--- .ci/311.yaml | 5 +++++ .ci/38-MIN.yaml | 4 ++++ .ci/39.yaml | 4 ++++ .github/workflows/testing.yml | 10 +++++---- spopt/region/azp.py | 40 +++++++++++++++++------------------ 7 files changed, 46 insertions(+), 27 deletions(-) diff --git a/.ci/310.yaml b/.ci/310.yaml index ca7df002..aa37d995 100644 --- a/.ci/310.yaml +++ b/.ci/310.yaml @@ -20,3 +20,7 @@ dependencies: - pytest - pytest-cov - pytest-xdist + # with pip + - pip + - pip: + - pulp diff --git a/.ci/311-DEV.yaml b/.ci/311-DEV.yaml index a3daf75e..8dc09972 100644 --- a/.ci/311-DEV.yaml +++ b/.ci/311-DEV.yaml @@ -4,14 +4,11 @@ channels: dependencies: - python=3.11 - geopandas - - folium - libpysal - - mapclassify - matplotlib - networkx - numpy>=1.21 - pandas>=1.0.5 - - pip - scikit-learn>=0.22 - scipy>=1.3.2 - spaghetti @@ -22,7 +19,10 @@ dependencies: - pytest - pytest-cov - pytest-xdist + # with pip + - pip - pip: + - pulp - git+https://github.com/geopandas/geopandas.git - git+https://github.com/networkx/networkx.git - git+https://github.com/pysal/libpysal.git diff --git a/.ci/311.yaml b/.ci/311.yaml index b8867826..de883154 100644 --- a/.ci/311.yaml +++ b/.ci/311.yaml @@ -25,3 +25,8 @@ dependencies: - sphinx - sphinxcontrib-bibtex - sphinx_bootstrap_theme + # with pip + - pip + - pip: + - pulp + diff --git a/.ci/38-MIN.yaml b/.ci/38-MIN.yaml index de8c8159..1cfd8844 100644 --- a/.ci/38-MIN.yaml +++ b/.ci/38-MIN.yaml @@ -19,3 +19,7 @@ dependencies: - pytest - pytest-cov - pytest-xdist + # with pip + - pip + - pip: + - pulp diff --git a/.ci/39.yaml b/.ci/39.yaml index 1722ca04..a4917df1 100644 --- a/.ci/39.yaml +++ b/.ci/39.yaml @@ -20,3 +20,7 @@ dependencies: - pytest - pytest-cov - pytest-xdist + # with pip + - pip + - pip: + - pulp diff --git a/.github/workflows/testing.yml b/.github/workflows/testing.yml index a9a724ba..1013e5d8 100644 --- a/.github/workflows/testing.yml +++ b/.github/workflows/testing.yml @@ -44,10 +44,12 @@ environment-file: ${{ matrix.environment-file }} micromamba-version: 'latest' channel-priority: 'flexible' - - - name: install pulp via pip - shell: bash -l {0} - run: pip install pulp + + ############################################################################## + #- name: install pulp via pip + # shell: bash -l {0} + # run: pip install pulp + ############################################################################## - name: environment info shell: bash -l {0} diff --git a/spopt/region/azp.py b/spopt/region/azp.py index 04acba9a..d3ff3532 100755 --- a/spopt/region/azp.py +++ b/spopt/region/azp.py @@ -231,11 +231,11 @@ def fit_from_scipy_sparse_matrix( One-dimensional array of labels at the beginning of the algorithm. If ``None``, then a random initial clustering will be generated. Default is ``None``. - objective_func : region.objective_function.ObjectiveFunction + objective_func : :class:`region.ObjectiveFunction` (default ObjectiveFunctionPairwise()) The objective function to use. Default is ``ObjectiveFunctionPairwise()``. - """ + """ # noqa E501 if attr.ndim == 1: attr = attr.reshape(adj.shape[0], -1) @@ -296,12 +296,12 @@ def fit_from_w( initial_labels : numpy.ndarray or None Refer to the corresponding argument in ``fit_from_scipy_sparse_matrix``. Default is ``None``. - objective_func : region.ObjectiveFunction + objective_func : :class:`region.ObjectiveFunction` (default ObjectiveFunctionPairwise()) Refer to the corresponding argument in ``fit_from_scipy_sparse_matrix``. 
Default is ``ObjectiveFunctionPairwise()``. - """ + """ # noqa E501 adj = scipy_sparse_matrix_from_w(w) self.fit_from_scipy_sparse_matrix( @@ -347,12 +347,12 @@ def fit_from_networkx( key area is assigned to at the beginning of the algorithm. If ``None``, then a random initial clustering will be generated. Default is ``None``. - objective_func : region.ObjectiveFunction + objective_func : :class:`region.ObjectiveFunction` (default ObjectiveFunctionPairwise()) Refer to the corresponding argument in ``fit_from_scipy_sparse_matrix``. Default is ``ObjectiveFunctionPairwise()``. - """ + """ # noqa E501 adj = nx.to_scipy_sparse_matrix(graph) attr = array_from_graph_or_dict(graph, attr) @@ -398,12 +398,12 @@ def fit_from_geodataframe( Refer to the corresponding argument in ``fit_from_scipy_sparse_matrix``. Default is ``None``. - objective_func : region.ObjectiveFunction + objective_func : :class:`region.ObjectiveFunction` (default ObjectiveFunctionPairwise()) Refer to the corresponding argument in ``fit_from_scipy_sparse_matrix``. Default is ``ObjectiveFunctionPairwise()``. - """ + """ # noqa E501 w = w_from_gdf(gdf, contiguity) attr = array_from_df_col(gdf, attr) self.fit_from_w( @@ -438,11 +438,11 @@ def fit_from_dict( corresponding area is assigned to at the beginning of the algorithm. If None, then a random initial clustering will be generated. - objective_func : region.ObjectiveFunction, default: ObjectiveFunctionPairwise() + objective_func : :class:`region.ObjectiveFunction` (default ObjectiveFunctionPairwise()) Refer to the corresponding argument in :meth:`fit_from_scipy_sparse_matrix`. - """ + """ # noqa E501 sorted_areas = sorted(neighbor_dict) adj = scipy_sparse_matrix_from_dict(neighbor_dict) @@ -677,12 +677,12 @@ def fit_from_geodataframe( cooling_factor : float Refer to the corresponding argument in ``fit_from_scipy_sparse_matrix``. Default is ``0.85``. - objective_func : region.ObjectiveFunction + objective_func : :class:`region.ObjectiveFunction` (default ObjectiveFunctionPairwise()) Refer to the corresponding argument in ``fit_from_scipy_sparse_matrix``. Default is ``ObjectiveFunctionPairwise()``. - """ + """ # noqa E501 w = w_from_gdf(gdf, contiguity) attr = array_from_df_col(gdf, attr) self.fit_from_w( @@ -721,12 +721,12 @@ def fit_from_dict( Refer to the corresponding argument in ``fit_from_scipy_sparse_matrix``. Default is ``0.85``. - objective_func : region.ObjectiveFunction + objective_func : :class:`region.ObjectiveFunction` (default ObjectiveFunctionPairwise()) Refer to the corresponding argument in ``fit_from_scipy_sparse_matrix``. Default is ``ObjectiveFunctionPairwise()``. - """ + """ # noqa E501 sorted_areas = sorted(neighbor_dict) adj = scipy_sparse_matrix_from_dict(neighbor_dict) attr_arr = array_from_dict_values(attr, sorted_areas) @@ -774,12 +774,12 @@ def fit_from_networkx( Refer to the corresponding argument in ``AZP.fit_from_networkx``. Default is ``0.85``. - objective_func : region.ObjectiveFunction + objective_func : :class:`region.ObjectiveFunction` (default ObjectiveFunctionPairwise()) Refer to the corresponding argument in ``AZP.fit_from_networkx``. Default is ``ObjectiveFunctionPairwise()``. - """ + """ # noqa E501 adj = nx.to_scipy_sparse_matrix(graph) attr = array_from_graph_or_dict(graph, attr) @@ -824,11 +824,11 @@ def fit_from_scipy_sparse_matrix( Float :math:`\\in (0, 1)` specifying the cooling factor for the simulated annealing. Default is ``0.85``. 
- objective_func : region.ObjectiveFunction + objective_func : :class:`region.ObjectiveFunction` (default ObjectiveFunctionPairwise()) Refer to the corresponding argument in ``AZP.fit_from_scipy_sparse_matrix``. - """ + """ # noqa E501 if not (0 < cooling_factor < 1): raise ValueError( "The cooling_factor argument must be greater than 0 and less than 1" @@ -909,12 +909,12 @@ def fit_from_w( Refer to the corresponding argument in ``fit_from_scipy_sparse_matrix``. Default is ``0.85``. - objective_func : region.ObjectiveFunction + objective_func : :class:`region.ObjectiveFunction` (default ObjectiveFunctionPairwise()) Refer to the corresponding argument in ``fit_from_scipy_sparse_matrix``. Default is ``ObjectiveFunctionPairwise()``. - """ + """ # noqa E501 adj = scipy_sparse_matrix_from_w(w) self.fit_from_scipy_sparse_matrix( adj,
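The SPENC `fit` hunk above keeps `shift_invert=True`, the shift-invert trick for pulling the small end of the spectrum out of a sparse eigenproblem. ARPACK converges fastest on eigenvalues of large magnitude, so rather than asking for the smallest eigenvalues of the graph Laplacian directly, one asks for the eigenvalues nearest a shift sigma of the inverted operator. A minimal sketch with SciPy, assuming a tiny path-graph Laplacian and a small negative shift to keep the factorization nonsingular (an illustration of the trick, not spopt's exact call):

    import numpy as np
    import scipy.sparse as spar
    from scipy.sparse import csgraph as cg
    from scipy.sparse.linalg import eigsh

    # Adjacency of a 4-node path graph and its combinatorial Laplacian.
    A = spar.csr_matrix(np.array([
        [0., 1., 0., 0.],
        [1., 0., 1., 0.],
        [0., 1., 0., 1.],
        [0., 0., 1., 0.],
    ]))
    L = cg.laplacian(A)

    # sigma=-1e-6 with which="LM" runs ARPACK in shift-invert mode and
    # returns the eigenpairs of L nearest the shift, i.e. the smallest
    # ones -- typically much faster than which="SM" on large graphs.
    vals, vecs = eigsh(L, k=2, sigma=-1e-6, which="LM")
    print(vals)  # ~[0.0, 0.586] for the path graph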
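The docstring lines removed from `fit` (the `cut_method` options, which `_make_hierarchical_cut` still accepts) describe three ways to turn the second eigenvector into a two-way partition: a hierarchical grid search over thresholds after Shi & Malik (2000), a cut at zero, and a cut at the median. A toy sketch of the two cheap rules, with `fiedler` standing in for that eigenvector:

    import numpy as np

    fiedler = np.array([-0.7, -0.1, 0.1, 0.2, 0.6])  # stand-in eigenvector

    cut_at_zero = fiedler > 0                      # "zero": cheap, usually balanced
    cut_at_median = fiedler > np.median(fiedler)   # "median": equal-as-possible halves
    print(cut_at_zero)    # [False False  True  True  True]
    print(cut_at_median)  # [False False False  True  True]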
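Two hunks in this part of the series (`SpanningForest.score` in skater.py and `get_metric_function` in region/util.py) append `from None` to raises that sit inside exception handlers. That disables implicit exception chaining, so callers see only the `ValueError` the library means to surface instead of the incidental `AttributeError`/`KeyError` that triggered it. A small illustration (the `lookup` helper is hypothetical):

    import traceback

    def lookup(name):
        metrics = {"euclidean": None, "manhattan": None}
        try:
            return metrics[name]
        except KeyError:
            # Without "from None" the traceback would print the internal
            # KeyError under "During handling of the above exception,
            # another exception occurred"; "from None" hides that detail.
            raise ValueError(f"'{name}' is not a known metric.") from None

    try:
        lookup("cosine")
    except ValueError:
        traceback.print_exc()  # shows only the ValueError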
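Patches 14 and 15 rewrite PEP 604 annotations such as `int | float` to `typing.Union` and add `UP007` to the ruff ignore list, because patch 13 lowers `target-version` to `py38` and the `|` operator between types only exists on Python 3.10+. Without postponed evaluation of annotations, the old spelling fails at import time; a minimal sketch, assuming Python 3.8:

    from typing import Union

    # On 3.8/3.9 the next definition raises at import time:
    #     TypeError: unsupported operand type(s) for |: 'type' and 'type'
    # def weights_total(w: int | float) -> float: ...

    def weights_total(w: Union[int, float]) -> float:
        # Same annotation, evaluates cleanly on 3.8+.
        return float(w)

    print(weights_total(3))  # 3.0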
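Patch 15 also backs out the `strict=True` keyword that an earlier patch in the series had added to `zip` calls in region/base.py, spenclib/scores.py, and region/util.py: `zip` only accepts `strict` from Python 3.10, so on the new 3.8 floor those calls would raise a `TypeError` before yielding anything. For sized inputs, a 3.8-compatible stand-in looks roughly like this (the `zip_strict` name is illustrative, not part of spopt):

    def zip_strict(left, right):
        # Emulate zip(left, right, strict=True) for sized iterables by
        # checking lengths up front instead of during iteration.
        if len(left) != len(right):
            raise ValueError(f"length mismatch: {len(left)} != {len(right)}")
        return zip(left, right)

    print(dict(zip_strict(["a", "b"], [1, 2])))  # {'a': 1, 'b': 2}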
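The final azp.py hunks keep the guard `if not (0 < cooling_factor < 1)` around the simulated-annealing schedule. The bound matters because the temperature is typically shrunk geometrically: a factor of 1 or more never cools (the search stays random), while 0 would freeze it immediately. A two-line illustration with the default ``0.85``:

    t = 1.0
    for _ in range(5):
        t *= 0.85  # geometric cooling; only converges for factors in (0, 1)
    print(round(t, 4))  # 0.4437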