bullm committed on
Commit
06773c7
·
1 Parent(s): 7d766a1

Upload companies.py

Browse files
Files changed (1) hide show
  1. apps/companies.py +322 -3
apps/companies.py CHANGED
@@ -1,3 +1,322 @@
1
- version https://git-lfs.github.com/spec/v1
2
- oid sha256:a3b9f15051f8a9b2d45d029fc5b4dc55ebe2801e3f35ee377096b8fc641fa4e7
3
- size 13398
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ Created on Tue Apr 26 17:38:54 2022
4
+
5
+ @author: bullm
6
+ """
7
+
8
+ import streamlit as st
9
+ from modules import tables
10
+ import pandas as pd
11
+ from streamlit_echarts import st_echarts
12
+ from Data.credentials import credentials_s3 as creds3
13
+ import boto3
14
+ import io
15
+ import pybase64 as base64
16
+ import matplotlib.pyplot as plt
17
+
18
+
19
@st.experimental_memo
def get_asset_field(id_quant, start, field='IQ_CLOSEPRICE_ADJ', expand=True,
                    rename=None):
    """Query one equity field from the quant master tables as a DataFrame.

    NOTE(review): this definition is shadowed by a later ``get_asset_field``
    in this module with a different signature ``(id_quant, field, start,
    ...)``; only the later one is live at runtime — confirm which callers
    expect which.

    Parameters
    ----------
    id_quant : str or list of str
        Quant asset identifier(s) passed to ``tables.EquityMaster``.
    start : str
        Start date forwarded to the query (e.g. '2020-01-01').
    field : str
        Field to fetch; defaults to adjusted close price.
    expand : bool
        Forwarded to ``EquityMaster.query``.
    rename : list of str, optional
        Column-rename spec; defaults to ['asset'].

    Returns
    -------
    pandas.DataFrame
    """
    # Build the default list per call instead of using a mutable default
    # argument (a shared list would leak state across calls).
    if rename is None:
        rename = ['asset']
    asset_obj = tables.EquityMaster(asset=id_quant, field=field)
    asset_df = asset_obj.query(rename=rename, start=start, expand=expand)
    return pd.DataFrame(asset_df)
25
+
26
+
27
@st.experimental_memo
def get_macro_field(country, start, instrument="INDEX", expand=True,
                    rename=None):
    """Query one macro instrument series for a country as a DataFrame.

    NOTE(review): shadowed by a later ``get_macro_field`` in this module
    with signature ``(country, instrument, start, ...)``; only the later
    one is live at runtime.

    Parameters
    ----------
    country : str or list of str
        Country identifier(s) passed to ``tables.MacroMaster``.
    start : str
        Start date forwarded to the query.
    instrument : str
        Macro instrument name (default "INDEX").
    expand : bool
        Forwarded to ``MacroMaster.query``.
    rename : list of str, optional
        Column-rename spec; defaults to ['country'].

    Returns
    -------
    pandas.DataFrame
    """
    # Avoid a mutable default argument; materialize the default per call.
    if rename is None:
        rename = ['country']
    asset_obj = tables.MacroMaster(country=country, instrument=instrument)
    asset_df = asset_obj.query(rename=rename, start=start, expand=expand)
    return pd.DataFrame(asset_df)
33
+
34
+
35
def plot_returns(id_quant, country, start):
    """Plot an asset's cumulative return against its country benchmark.

    The area between the two curves is shaded green where the asset
    outperforms the benchmark and red where it underperforms, then the
    figure is rendered into the Streamlit page.

    BUG FIX: the original called ``get_asset_field(id_quant, start)`` and
    ``get_macro_field(country, start)``, but the *live* definitions of
    those helpers (the later ones in this module) take ``(id_quant, field,
    start, ...)`` / ``(country, instrument, start, ...)``, so ``start``
    was silently bound to the field/instrument argument. The calls below
    pass every argument explicitly.

    Parameters
    ----------
    id_quant : str
        Quant identifier of the asset; also the asset column name.
    country : str
        Country name; also the benchmark column name.
    start : str
        Start date for both series.
    """
    asset_df = get_asset_field(id_quant, 'IQ_CLOSEPRICE_ADJ', start,
                               expand=True, rename=['asset'])
    index_df = get_macro_field(country, 'INDEX', start)
    asset_df = asset_df.merge(index_df, how='left',
                              left_index=True,
                              right_index=True)
    x = asset_df.index
    # Asset: price rebased to its first observation, minus 1.
    y2 = asset_df[id_quant] / asset_df.iloc[0][id_quant] - 1
    # Benchmark: compound the return series into a cumulative return.
    y1 = (1 + asset_df[country]).cumprod() - 1
    plt.figure(figsize=(10, 5))
    plt.rcParams['axes.facecolor'] = '#EAEAEA'
    plt.rcParams['figure.facecolor'] = '#EAEAEA'
    # Green where the asset beats the benchmark, red otherwise.
    plt.fill_between(x, y1, y2, where=y2 > y1, facecolor='green', alpha=0.5)
    plt.fill_between(x, y1, y2, where=y2 <= y1, facecolor='red', alpha=0.5)
    plt.xticks(rotation=60)
    plt.title('Asset vs Benchmark')
    st.pyplot(plt, height='300')
52
+
53
+
54
def get_ebitda(id_quant, start='2021-01-01'):
    """Show a Streamlit metric with the latest EBITDA and its change.

    BUG FIX: the original call ``get_asset_field(id_quant, '2021-01-01',
    field='IQ_EBITDA', ...)`` raises ``TypeError`` against the live
    ``get_asset_field(id_quant, field, start, ...)`` signature — the date
    bound positionally to ``field`` and then the ``field=`` keyword
    duplicated it. The hard-coded start date is now a defaulted parameter.

    Parameters
    ----------
    id_quant : str
        Quant identifier of the asset; also the EBITDA column name.
    start : str
        Start date of the EBITDA history (default '2021-01-01').
    """
    ebitda_df = get_asset_field(id_quant, 'IQ_EBITDA', start, expand=True,
                                rename=['asset'])
    # Latest vs. previous reported EBITDA, rounded for display.
    ebitda_actual = round(ebitda_df.iloc[-1][id_quant], 2)
    ebitda_anterior = round(ebitda_df.iloc[-2][id_quant], 2)
    delta = round(ebitda_actual - ebitda_anterior, 2)
    st.metric("Ebitda " + ebitda_df.index[-1].strftime("%Y-%m-%d"),
              ebitda_actual, delta)
61
+
62
+
63
+
64
+
65
+
66
+
67
+
68
+
69
+
70
+
71
@st.experimental_memo
def get_asset_field(id_quant, field, start, expand=False, rename=None):
    """Query one equity field from the quant master tables as a DataFrame.

    NOTE(review): this redefinition shadows an earlier ``get_asset_field``
    in this module that had a different parameter order; this is the one
    live at runtime.

    Parameters
    ----------
    id_quant : str or list of str
        Quant asset identifier(s) passed to ``tables.EquityMaster``.
    field : str
        Field to fetch (e.g. 'IQ_CLOSEPRICE_ADJ').
    start : str
        Start date forwarded to the query.
    expand : bool
        Forwarded to ``EquityMaster.query``.
    rename : list of str, optional
        Column-rename spec; defaults to ['asset', 'field'].

    Returns
    -------
    pandas.DataFrame
    """
    # Avoid a mutable default argument (shared list across calls).
    if rename is None:
        rename = ['asset', 'field']
    asset_obj = tables.EquityMaster(asset=id_quant, field=field)
    asset_df = asset_obj.query(rename=rename, start=start, expand=expand)
    return pd.DataFrame(asset_df)
76
+
77
@st.experimental_memo
def get_macro_field(country, instrument, start, expand=True, rename=None):
    """Query one macro instrument series for a country as a DataFrame.

    NOTE(review): this redefinition shadows an earlier ``get_macro_field``
    in this module; this one is live at runtime.

    Parameters
    ----------
    country : str or list of str
        Country identifier(s) passed to ``tables.MacroMaster``.
    instrument : str
        Macro instrument name (e.g. "INDEX").
    start : str
        Start date forwarded to the query.
    expand : bool
        Forwarded to ``MacroMaster.query``.
    rename : list of str, optional
        Column-rename spec; defaults to ['country'].

    Returns
    -------
    pandas.DataFrame
    """
    # Avoid a mutable default argument (shared list across calls).
    if rename is None:
        rename = ['country']
    asset_obj = tables.MacroMaster(country=country, instrument=instrument)
    asset_df = asset_obj.query(rename=rename, start=start, expand=expand)
    return pd.DataFrame(asset_df)
82
+
83
def get_dict_companies():
    """Load the company master sheet and map each ticker to its quant ID.

    Returns
    -------
    dict
        ``{Ticker: ID_Quant}`` built from the 'Compilado' sheet of
        Data/Company_Base_Definitivo.xlsx.
    """
    base = pd.read_excel("Data/Company_Base_Definitivo.xlsx",
                         sheet_name='Compilado')
    # Later duplicate tickers (if any) overwrite earlier ones, matching
    # dict(zip(...)) semantics.
    return {ticker: quant_id
            for ticker, quant_id in zip(base["Ticker"], base["ID_Quant"])}
88
+ # asset = data_daily[field][id_quant]
89
+
90
def read_itub(csv_path='C:/Users/bullm/Desktop/ITUB.csv'):
    """Load an ITUB price CSV and chart its adjusted close in Streamlit.

    The CSV must contain 'Date' and 'Adj Close' columns (Yahoo-Finance
    style export).

    Parameters
    ----------
    csv_path : str
        Path to the CSV file. Defaults to the original hard-coded local
        path for backward compatibility; pass an explicit path to use this
        helper outside that machine.
    """
    itub_df = pd.read_csv(csv_path)
    # Index by date so st.line_chart gets a time axis.
    itub_df.index = pd.to_datetime(itub_df["Date"])
    itub_cs_s = itub_df["Adj Close"]
    st.line_chart(itub_cs_s)
95
+
96
+
97
def company_info():
    """Streamlit page: chart a chosen price field for selected companies.

    Loads the company master workbook, lets the user pick tickers, a
    country and a field, then plots the forward-filled series with optional
    moving averages and country/sector index overlays.

    NOTE(review): branch nesting reconstructed from a whitespace-mangled
    dump — the moving-average and single-ticker chart branches are assumed
    to sit under ``len(tickers) == 1``; confirm against the original file.
    """
    st.set_page_config(layout="wide", page_title="Portal LVAM",
                       page_icon="img/icono.png")
    st.sidebar.write("Companies")

    company_base_df = pd.read_excel("Data/Company_Base_Definitivo.xlsx",
                                    sheet_name='Compilado')
    col1, col2 = st.columns((1, 1.681))
    companies_id_dict = get_dict_companies()
    # Ticker / country pickers (labels kept verbatim, typos included,
    # since Streamlit derives widget identity from label + options).
    tickers = col2.multiselect("Seleccionasr Empresa",
                               company_base_df["Ticker"],
                               ["ITUB4"])
    country = col2.multiselect("Seleccionasr Empresa",
                               company_base_df["Portfolio_Country"].unique(),
                               ["Brazil"])
    id_quants= [str(companies_id_dict[ticker]) for ticker in tickers]
    fields_ls= ["IQ_CLOSEPRICE_ADJ", "IQ_MARKETCAP"]
    field = col1.selectbox("Selecione un campo", fields_ls)
    start = '2020-01-01'
    # Uses the live get_asset_field(id_quant, field, start, ...) signature.
    df = get_asset_field(id_quants, field, start, rename=['asset'])
    df = df.ffill(axis=0)
    tickers = list(tickers)
    company_id_dict = dict(zip(company_base_df["Ticker"], company_base_df["ID_Quant"]))
    id_company_dict = dict(zip(company_base_df["ID_Quant"], company_base_df["Ticker"]))
    # Query returns string quant IDs as columns; relabel them with tickers.
    df.columns = [id_company_dict[int(col)] for col in df.columns]
    st.title('Cierre Ajustado Mongo Quant')
    col1, col2, col3 = st.columns(3)
    mm2 = col2.checkbox("Indice Pais")    # overlay the country index
    mm3 = col3.checkbox("Indice Sector")  # overlay a sector index (multi-ticker path)
    if len(tickers) == 1:
        mm = col1.checkbox("Medias moviles")  # moving-averages toggle
        rollings = [20,60,240]
        # One rolling-mean series per window, labelled "<ticker> <window>".
        dicc_mm = {
            tickers[0] + f' {x}':df[tickers[0]].rolling(x).mean() for x in rollings
        }
        df2 =pd.concat(dicc_mm.values(), keys=dicc_mm.keys(), axis=1)
        df = pd.concat([df, df2], axis=1)
        if mm2:
            # Country index returns compounded into a cumulative level.
            mc_df = (1+get_macro_field(country, "INDEX", start)).cumprod()
            df = pd.concat([df, mc_df], axis=1).ffill(axis=0)
            df = df.iloc[len(df) - 252: ]  # keep ~one trading year

        else:
            df = df.iloc[len(df) - 252: ]
        if not mm and not mm2:
            st.write(df)
            st.line_chart(df[df.columns[0]])
        elif not mm and mm2:
            # Rebase price and index to their first value so they overlay.
            df = df[[df.columns[0],df.columns[-1]]]/df.iloc[0][[df.columns[0],df.columns[-1]]]
            st.write(df)
            st.line_chart(df)
        else:
            st.write(df)
            st.line_chart(df)
    if len(tickers) > 1:
        if mm2:
            mc_df = (1+get_macro_field(country, "INDEX", start)).cumprod()
            df = pd.concat([df, mc_df], axis=1).ffill(axis=0)
        if mm3:
            # NOTE(review): the "sector" overlay is hard-coded to the banks
            # index — presumably a placeholder; verify intended sector.
            mc_df = (1+get_macro_field(country, "Banks_INDEX", start)).cumprod()
            df = pd.concat([df, mc_df], axis=1).ffill(axis=0)
        df = df.iloc[len(df)-252:]
        # st.write(df.iloc[0])
        # st.write(df.iloc[-1])

        # Rebase every series to its first value so they share one scale.
        st.line_chart(df/df.iloc[0]) #/df.iloc[0]-1)
163
+
164
import json


def save_index(list_assets, titulo, path='Data/index.json'):
    """Persist a named list of assets into the JSON index file.

    Loads the existing index mapping, sets/overwrites the entry for
    *titulo* and writes the whole mapping back.

    Parameters
    ----------
    list_assets : list
        Asset identifiers (tickers) to store under *titulo*.
    titulo : str
        Key under which the list is saved.
    path : str
        JSON file holding all saved indexes (default 'Data/index.json',
        matching the original hard-coded path).
    """
    # Start from an empty index if the file does not exist yet — the
    # original crashed with FileNotFoundError on first use.
    try:
        with open(path, 'r') as json_file:
            json_object = json.load(json_file)
    except FileNotFoundError:
        json_object = {}
    json_object[titulo] = list_assets
    # 'with' closes the file; the original's explicit close() was redundant.
    with open(path, 'w') as outfile:
        json.dump(json_object, outfile)
175
+
176
@st.experimental_memo
def read_scoring():
    """Fetch 'scoring.xlsx' from the configured S3 bucket as a DataFrame.

    Credentials and bucket come from ``Data.credentials.credentials_s3``;
    the download/parse itself is delegated to ``read_excel_s3``. Memoized
    by Streamlit so the object is fetched once per session.
    """
    return read_excel_s3(creds3["S3_KEY_ID"],
                         creds3["S3_SECRET_KEY"],
                         creds3["S3_BUCKET"],
                         "scoring.xlsx")
184
+
185
def read_excel_s3(key, secret_key, bucket, path):
    """Download an Excel object from S3 and parse it into a DataFrame.

    Parameters
    ----------
    key, secret_key : str
        AWS access key id / secret access key.
    bucket : str
        S3 bucket name.
    path : str
        Object key of the .xlsx file inside the bucket.

    Returns
    -------
    pandas.DataFrame
        First sheet of the workbook, parsed with the openpyxl engine.
    """
    client = boto3.client('s3',
                          aws_access_key_id=key,
                          aws_secret_access_key=secret_key)
    payload = client.get_object(Bucket=bucket, Key=path)["Body"].read()
    return pd.read_excel(io.BytesIO(payload), engine='openpyxl')
191
+
192
+
193
def get_table_excel_link(df, name):
    """Build an HTML download link embedding *df* as a base64 Excel file.

    The DataFrame is written to an in-memory .xlsx (xlsxwriter engine),
    base64-encoded into a data URI and wrapped in a styled ``<a><button>``
    element suitable for ``st.markdown(..., unsafe_allow_html=True)``.

    Cleanup vs. original: dropped the unused locals ``downloaded_file``
    (``DataFrame.to_excel`` returns None) and ``workbook``.

    Parameters
    ----------
    df : pandas.DataFrame or pandas.Series
        Table to export.
    name : str
        Button label; also the download file name ("<name>.xlsx").

    Returns
    -------
    str
        HTML fragment with the embedded file.
    """
    towrite = io.BytesIO()
    writer = pd.ExcelWriter(towrite, engine='xlsxwriter')
    df.to_excel(writer, encoding='utf-8', index=True, header=True)
    worksheet = writer.sheets["Sheet1"]
    # Uniform column width so the content is readable when opened.
    worksheet.set_column('A:BZ', 18)
    writer.save()
    towrite.seek(0)  # reset pointer before reading the bytes back
    file_name = name + '.xlsx'
    style = 'style="color:black;text-decoration: none; font-size:18px;" '
    name_mark = name
    b64 = base64.b64encode(towrite.read()).decode()
    linko = f'<center><a href="data:application/vnd.openxmlformats-officedocument.spreadsheetml.sheet;base64,{b64}" '+style+'download="'+file_name+'"><button>'+name_mark+'</button></a></center>'
    return linko
210
+
211
+
212
+
213
+
214
+
215
+
216
def index_constructor():
    """Streamlit page: build a market-cap-weighted index from chosen tickers.

    Filters the company universe (country, size bucket, market cap, ADTV),
    lets the user pick tickers, charts the weighted benchmark for the
    chosen field and offers Excel download links for the index, weights,
    returns and benchmark series.

    BUG FIX: the original did ``save_index = col2.button("Save Index")``,
    rebinding the module-level ``save_index`` function to a bool, and then
    called ``save_index(tickers, titulo)`` — a TypeError that the broad
    ``except`` silently displayed. The button flag now has its own name.
    Also dropped two dict locals that were never read before being
    shadowed, and computes the benchmark once instead of twice.
    """
    try:
        company_base_df = pd.read_excel("Data/Company_Base_Definitivo.xlsx",
                                        sheet_name='Compilado')
        scoring = read_scoring()[["Ticker", "Large/Small", "Market_Cap", "ADTV"]]
        company_base_df = company_base_df.merge(scoring, how='left', on='Ticker')
        col1, col2, col3, col4 = st.columns(4)
        country = col1.selectbox("Country", ["All", "Chile", "Brazil", "Mexico", "Peru", "Colombia"])
        large_small = col2.selectbox("Large/Small", ["All", "Large", "Small"])
        start = col3.text_input('Date', '2022-01')
        field1 = col4.selectbox("Field", ['IQ_CLOSEPRICE_ADJ', 'IQ_PBV'])
        # Optional size/liquidity filters on the universe.
        if col1.checkbox("Filtro por Mkt Cap"):
            mkt_cap_min = col2.number_input("Mkt Cap Min", value=1000)
            company_base_df = company_base_df[company_base_df["Market_Cap"] > mkt_cap_min]
        if col3.checkbox("Filtro por ADTV"):
            adtv = col4.number_input("ADTV Min", value=1)
            company_base_df = company_base_df[company_base_df["ADTV"] > adtv]
        if country != "All":
            company_base_df = company_base_df[company_base_df["Portfolio_Country"] == country]
        if large_small != "All":
            company_base_df = company_base_df[company_base_df["Large/Small"] == large_small]
        if st.checkbox("Seleccionar todos"):
            tickers = st.multiselect("Seleccionar Empresa",
                                     company_base_df["Ticker"],
                                     company_base_df["Ticker"])
        else:
            tickers = st.multiselect("Seleccionasr Empresa2",
                                     company_base_df["Ticker"])
        if len(tickers) > 0:
            titulo = col1.text_input("Titulo")
            # Separate flag so the save_index() function is not shadowed.
            save_clicked = col2.button("Save Index")
            if save_clicked:
                save_index(tickers, titulo)
            companies_id_dict = dict(zip(company_base_df["Ticker"],
                                         company_base_df["ID_Quant"]))
            id_quants = [str(companies_id_dict[ticker]) for ticker in tickers]

            field = get_asset_field(id_quants,
                                    field1,
                                    start,
                                    expand=False,
                                    rename=['asset'])
            # CLP/USD series fetched for a currency adjustment that is
            # currently commented out below.
            ccy = tables.MacroMaster(instrument='FX_USD',
                                     currency='CLP').query(start=start)
            if field1 == 'IQ_CLOSEPRICE_ADJ':
                rets = field.pct_change()  # field.mul(ccy, axis=0).pct_change()
            else:
                rets = field.ffill(0)
            mkt_cap = get_asset_field(id_quants,
                                      'IQ_MARKETCAP',
                                      start,
                                      expand=False,
                                      rename=['asset']).ffill(0)
            # Lagged market-cap weights: shift(1) avoids look-ahead bias.
            weights = mkt_cap.div(mkt_cap.sum(axis=1), axis=0).shift(1)

            if field1 == 'IQ_CLOSEPRICE_ADJ':
                # Price field: compound weighted returns into an index.
                bm = (1 + (rets * weights).sum(axis=1)).cumprod() - 1
            else:
                # Other fields: weighted cross-sectional aggregate.
                bm = (rets * weights).sum(axis=1)
            st.line_chart(bm)
            # Relabel quant-ID columns with CIQ tickers for the downloads.
            id_company_dict = dict(zip(company_base_df["ID_Quant"],
                                       company_base_df["Ticker CIQ"]))
            weights.columns = [id_company_dict[int(col)] for col in weights.columns]
            rets.columns = [id_company_dict[int(col)] for col in rets.columns]
            index = (1 + get_macro_field('Chile', "INDEX", start)).cumprod()
            col1, col2, col3, col4 = st.columns(4)
            col1.markdown(get_table_excel_link(index, "Index"),
                          unsafe_allow_html=True)
            col2.markdown(get_table_excel_link(weights, "Weights"),
                          unsafe_allow_html=True)
            col3.markdown(get_table_excel_link(rets, "Retornos"),
                          unsafe_allow_html=True)
            col4.markdown(get_table_excel_link(bm, "bm"), unsafe_allow_html=True)

    except Exception as exc:
        # Broad catch kept deliberately: surface any failure on the page
        # instead of crashing the whole Streamlit app.
        st.write(exc)
297
+
298
+
299
def pca(rets):
    """Run a 10-component PCA on a returns matrix and render the results.

    Writes the zero-filled returns, the component loadings ("weights") and
    the factor-return series to the Streamlit page, shows the explained
    variance ratios, and renders Excel download links for the weights and
    factor returns.

    Parameters
    ----------
    rets : pandas.DataFrame
        Wide matrix of returns — presumably dates x assets (TODO confirm);
        NaNs are filled with 0 before fitting.
    """
    # Imports kept function-local as in the original file.
    from sklearn.decomposition import PCA
    import numpy as np
    st.header('PCA')
    pca = PCA(n_components=10)
    rets_arr = np.array(rets.fillna(0))
    # Rebuild a labelled frame of the exact array fed to the fit.
    rets_df = pd.DataFrame(rets_arr, columns = rets.columns, index= rets.index)
    st.subheader('Retornos')
    st.write(rets_df)
    # fit_transform returns the observations projected onto the components.
    retorno_factores_arr = pca.fit_transform(rets_arr)
    # Rows of components_ are per-asset loadings of each factor.
    weights = pd.DataFrame(pca.components_, columns = rets.columns)
    st.subheader('Weights')
    st.write(weights)
    ret_factor_fin = pd.DataFrame(retorno_factores_arr, index= rets.index)
    st.subheader('Retornos Factores')
    st.write(ret_factor_fin)
    col1, col2 = st.columns(2)
    # Variance explained per component and its running total.
    st.write(pca.explained_variance_ratio_)
    st.write(pca.explained_variance_ratio_.cumsum())

    col1.markdown(get_table_excel_link(weights, "Weights"),
                  unsafe_allow_html=True)
    col2.markdown(get_table_excel_link(ret_factor_fin, "Retornos PCA"),
                  unsafe_allow_html=True)