Skip to content

Commit 5f1a097

Browse files
committed
refactoring
1 parent f0507d5 commit 5f1a097

File tree

3 files changed

+15
-14
lines changed

3 files changed

+15
-14
lines changed

nusa_alphalens/__init__.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,11 +2,12 @@
22
from . import plotting
33
from . import tears
44
from . import utils
5+
from . import nusa
56

67
from ._version import get_versions
78

89

910
__version__ = get_versions()['version']
1011
del get_versions
1112

12-
__all__ = ['performance', 'plotting', 'tears', 'utils']
13+
__all__ = ['performance', 'plotting', 'tears', 'utils', 'nusa']

nusa_alphalens/nusa.py

Lines changed: 10 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,14 @@
1-
import pandas as pd
1+
import duckdb
22
import logging
33

4+
import pandas as pd
5+
46
from typing import List, Optional, Union
57

6-
import duckdb
78
import pandas_market_calendars as mcal
89

910
logging.basicConfig(format='%(message)s ::: %(asctime)s', datefmt='%I:%M:%S %p')
1011

11-
1212
#
1313
# Adjusting pricing data
1414
#
@@ -81,6 +81,7 @@ def _adjust_field(field: str, table: str) -> str:
8181

8282
return ''
8383

84+
8485
#
8586
# Adjusting data for universe
8687
#
@@ -140,7 +141,7 @@ def add_index_info(self, index_constitutes: pd.DataFrame, start_date: Union[pd.T
140141

141142
# will throw an error if there are duplicate self.__id_col
142143
_handle_duplicates(df=index_constitutes, out_type='ValueError', name='The column symbols',
143-
drop=False, subset=[self.__id_col])
144+
drop=False, subset=[self.__id_col])
144145

145146
# seeing if we have to convert from and thru to series of timestamps
146147
if date_format != '':
@@ -305,6 +306,11 @@ def _check_columns(needed: List[str], df: pd.DataFrame, index_columns: bool = Tr
305306
return df
306307

307308

309+
#
310+
# utility
311+
#
312+
313+
308314
def _handle_duplicates(df: pd.DataFrame, out_type: str, name: str, drop: bool = False,
309315
subset: List[any] = None) -> pd.DataFrame:
310316
"""
@@ -339,10 +345,3 @@ def _handle_duplicates(df: pd.DataFrame, out_type: str, name: str, drop: bool =
339345

340346
if drop:
341347
return df
342-
343-
344-
if __name__ == '__main__':
345-
df = pd.read_csv(
346-
'/Users/alex/Desktop/WRDS/CRSP/Annual Update/Stock : Security Files/Daily Stock File/Daily Stock File 29251231-20211231.gz',
347-
nrows=1000).drop('cfacshr', axis=1)
348-
print(adjust_crsp_data(df))

setup.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -34,8 +34,8 @@
3434
if __name__ == "__main__":
3535
setup(
3636
name='nusa_alphalens',
37-
version=versioneer.get_version(),
38-
cmdclass=versioneer.get_cmdclass(),
37+
version='0.5.0',#versioneer.get_version(),
38+
#cmdclass=versioneer.get_cmdclass(),
3939
description='Performance analysis of predictive (alpha) stock factors',
4040
author='Quantopian Inc.',
4141
author_email='opensource@quantopian.com',
@@ -61,4 +61,5 @@
6161
url='https://github.com/quantopian/alphalens',
6262
install_requires=install_reqs,
6363
extras_require=extra_reqs,
64+
download_url='https://github.com/Northeastern-Systematic-Alpha/alphalens/archive/refs/tags/v0.5.0.tar.gz'
6465
)

0 commit comments

Comments (0)