Ran ruff linter
ronaldokun committed Aug 22, 2023
1 parent 10dc178 commit 2e751a3
Showing 19 changed files with 716 additions and 189 deletions.
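
The commit message does not record the exact command used. Assuming the ruff version pinned in the environment file below (0.0.285), a typical invocation would have been:

    ruff check . --fix

The import re-ordering in the Python diffs is consistent with ruff's isort rules being auto-fixed; the quote normalization and line wrapping (for example the multi-line __all__ lists) look like the output of black 23.7.0, which is also pinned below and would have been run separately, e.g. `black extracao/`.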
232 changes: 232 additions & 0 deletions environment_ubuntu.yml
@@ -0,0 +1,232 @@
name: dados
channels:
- fastai
- conda-forge
- defaults
dependencies:
- _libgcc_mutex=0.1
- _openmp_mutex=4.5
- aiohttp=3.8.5
- aiosignal=1.3.1
- anyio=3.7.1
- asttokens=2.2.1
- astunparse=1.6.3
- async-timeout=4.0.3
- attrs=23.1.0
- aws-c-auth=0.7.3
- aws-c-cal=0.6.1
- aws-c-common=0.9.0
- aws-c-compression=0.2.17
- aws-c-event-stream=0.3.1
- aws-c-http=0.7.11
- aws-c-io=0.13.32
- aws-c-mqtt=0.9.3
- aws-c-s3=0.3.14
- aws-c-sdkutils=0.1.12
- aws-checksums=0.1.17
- aws-crt-cpp=0.23.0
- aws-sdk-cpp=1.10.57
- backcall=0.2.0
- backports=1.0
- backports.functools_lru_cache=1.6.5
- black=23.7.0
- brotli=1.0.9
- brotli-bin=1.0.9
- brotli-python=1.0.9
- bzip2=1.0.8
- c-ares=1.19.1
- ca-certificates=2023.7.22
- certifi=2023.7.22
- cffi=1.15.1
- charset-normalizer=3.2.0
- click=8.1.7
- colorama=0.4.6
- comm=0.1.4
- contourpy=1.1.0
- cryptography=41.0.3
- cycler=0.11.0
- dacite=1.8.0
- dataclasses=0.8
- debugpy=1.6.8
- decorator=5.1.1
- dnspython=2.4.2
- exceptiongroup=1.1.3
- execnb=0.1.5
- executing=1.2.0
- fastcore=1.5.29
- fastprogress=1.0.3
- fonttools=4.42.1
- freetype=2.12.1
- frozenlist=1.4.0
- geographiclib=1.52
- geopy=2.3.0
- gflags=2.2.2
- ghapi=1.0.4
- glog=0.6.0
- h11=0.14.0
- h2=4.1.0
- hpack=4.0.0
- htmlmin=0.1.12
- httpcore=0.17.3
- hyperframe=6.0.1
- idna=3.4
- imagehash=4.3.1
- importlib-metadata=6.8.0
- importlib_metadata=6.8.0
- ipykernel=6.25.1
- ipython=8.14.0
- ipywidgets=8.1.0
- jedi=0.19.0
- jinja2=3.1.2
- joblib=1.3.2
- jupyter_client=8.3.0
- jupyter_core=5.3.1
- jupyterlab_widgets=3.0.8
- keyutils=1.6.1
- kiwisolver=1.4.4
- krb5=1.21.2
- lcms2=2.15
- ld_impl_linux-64=2.40
- lerc=4.0.0
- libabseil=20230125.3
- libarrow=12.0.1
- libblas=3.9.0
- libbrotlicommon=1.0.9
- libbrotlidec=1.0.9
- libbrotlienc=1.0.9
- libcblas=3.9.0
- libcrc32c=1.1.2
- libcurl=8.2.1
- libdeflate=1.18
- libedit=3.1.20221030
- libev=4.33
- libevent=2.1.12
- libexpat=2.5.0
- libffi=3.4.2
- libgcc-ng=13.1.0
- libgfortran-ng=13.1.0
- libgfortran5=13.1.0
- libgomp=13.1.0
- libgoogle-cloud=2.12.0
- libgrpc=1.56.2
- libiconv=1.17
- libjpeg-turbo=2.1.5.1
- liblapack=3.9.0
- libnghttp2=1.52.0
- libnsl=2.0.0
- libnuma=2.0.16
- libopenblas=0.3.23
- libpng=1.6.39
- libprotobuf=4.23.3
- libsodium=1.0.18
- libsqlite=3.42.0
- libssh2=1.11.0
- libstdcxx-ng=13.1.0
- libthrift=0.18.1
- libtiff=4.5.1
- libutf8proc=2.8.0
- libuuid=2.38.1
- libwebp-base=1.3.1
- libxcb=1.15
- libzlib=1.2.13
- lz4-c=1.9.4
- markdown-it-py=3.0.0
- markupsafe=2.1.3
- matplotlib-base=3.7.2
- matplotlib-inline=0.1.6
- mdurl=0.1.0
- multidict=6.0.4
- multimethod=1.4
- munkres=1.1.4
- mypy_extensions=1.0.0
- nbdev=2.3.12
- ncurses=6.4
- nest-asyncio=1.5.6
- networkx=3.1
- numpy=1.23.5
- openjpeg=2.5.0
- openssl=3.1.2
- orc=1.9.0
- packaging=23.1
- pandas=2.0.3
- parso=0.8.3
- pathspec=0.11.2
- patsy=0.5.3
- pexpect=4.8.0
- phik=0.12.3
- pickleshare=0.7.5
- pillow=10.0.0
- pip=23.2.1
- platformdirs=3.10.0
- pooch=1.7.0
- prompt-toolkit=3.0.39
- prompt_toolkit=3.0.39
- psutil=5.9.5
- pthread-stubs=0.4
- ptyprocess=0.7.0
- pure_eval=0.2.2
- pyarrow=12.0.1
- pybind11-abi=4
- pycparser=2.21
- pydantic=1.10.12
- pygments=2.16.1
- pymongo=4.4.1
- pyodbc=4.0.39
- pyparsing=3.0.9
- pysocks=1.7.1
- python=3.11.4
- python-dateutil=2.8.2
- python-dotenv=1.0.0
- python-tzdata=2023.3
- python_abi=3.11
- pytz=2023.3
- pywavelets=1.4.1
- pyyaml=6.0
- pyzmq=25.1.1
- rdma-core=28.9
- re2=2023.03.02
- readline=8.2
- requests=2.31.0
- rich=13.5.1
- ruff=0.0.285
- s2n=1.3.49
- scipy=1.10.1
- seaborn=0.12.2
- seaborn-base=0.12.2
- setuptools=68.1.2
- shellingham=1.5.3
- six=1.16.0
- snappy=1.1.10
- sniffio=1.3.0
- stack_data=0.6.2
- statsmodels=0.14.0
- tangled-up-in-unicode=0.2.0
- tk=8.6.12
- tornado=6.3.3
- tqdm=4.66.1
- traitlets=5.9.0
- typeguard=2.13.3
- typer=0.9.0
- typing-extensions=4.7.1
- typing_extensions=4.7.1
- tzdata=2023c
- ucx=1.14.1
- unixodbc=2.3.12
- urllib3=2.0.4
- visions=0.7.5
- watchdog=2.1.6
- wcwidth=0.2.6
- wheel=0.41.2
- widgetsnbextension=4.0.8
- wordcloud=1.9.2
- xmltodict=0.13.0
- xorg-libxau=1.0.11
- xorg-libxdmcp=1.1.3
- xz=5.2.6
- yaml=0.2.5
- yarl=1.9.2
- ydata-profiling=4.5.1
- zeromq=4.3.4
- zipp=3.16.2
- zstd=1.5.2
prefix: /home/melinda/micromamba/envs/dados
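
To recreate this environment (assuming conda, mamba, or micromamba is available), the file can be used directly:

    micromamba create -f environment_ubuntu.yml
    # or: conda env create -f environment_ubuntu.yml

Either command builds an environment named "dados", as declared at the top of the file; the prefix: line at the bottom is specific to the original author's machine and can safely be removed or ignored.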
31 changes: 23 additions & 8 deletions extracao/aero/aisgeo.py
@@ -1,17 +1,28 @@
# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/08_aisgeo.ipynb.

# %% auto 0
__all__ = ['LINK_VOR', 'LINK_DME', 'LINK_NDB', 'COLS_VOR', 'COLS_NDB', 'COLS_DME', 'UNIQUE_COLS', 'convert_frequency',
'get_geodf', 'get_aisg']
__all__ = [
"LINK_VOR",
"LINK_DME",
"LINK_NDB",
"COLS_VOR",
"COLS_NDB",
"COLS_DME",
"UNIQUE_COLS",
"convert_frequency",
"get_geodf",
"get_aisg",
]

# %% ../../nbs/08_aisgeo.ipynb 2
import os
import json
from urllib.request import urlopen
import os
from pathlib import Path
from typing import List
from dotenv import load_dotenv, find_dotenv
from urllib.request import urlopen

import pandas as pd
from pathlib import Path
from dotenv import find_dotenv, load_dotenv

load_dotenv(find_dotenv(), override=True)

@@ -48,7 +59,8 @@
"Channel",
)

UNIQUE_COLS = ['Frequency', 'Latitude', 'Longitude']
UNIQUE_COLS = ["Frequency", "Latitude", "Longitude"]


# %% ../../nbs/08_aisgeo.ipynb 6
def convert_frequency(
@@ -76,7 +88,9 @@ def _process_frequency(
cols: List[str], # Subconjunto de Colunas relevantes do DataFrame
) -> pd.DataFrame: # Dataframe com os dados de frequência devidamente processados
if cols == COLS_DME:
df_channels = pd.read_csv(f'{Path(__file__).parent}/{os.environ["PATH_CHANNELS"]}', dtype="string")
df_channels = pd.read_csv(
f'{Path(__file__).parent}/{os.environ["PATH_CHANNELS"]}', dtype="string"
)
df = df.dropna(subset=[cols[0]])
df["Channel"] = df[cols[0]].astype("int").astype("string") + df[cols[1]]
df["Frequency"] = -1.0
@@ -96,6 +110,7 @@ def _process_frequency(
)
return df


# %% ../../nbs/08_aisgeo.ipynb 8
def _filter_df(df, cols): # sourcery skip: use-fstring-for-concatenation
df.fillna("", inplace=True)
26 changes: 19 additions & 7 deletions extracao/aero/aisweb.py
@@ -1,20 +1,31 @@
# AUTOGENERATED! DO NOT EDIT! File to edit: ../../nbs/07_aisweb.ipynb.

# %% auto 0
__all__ = ['SIGLA_AERO', 'URL', 'TYPE', 'COLUMNS', 'UNIQUE_COLS', 'convert_latitude', 'convert_longitude', 'AisWeb', 'get_aisw']
__all__ = [
"SIGLA_AERO",
"URL",
"TYPE",
"COLUMNS",
"UNIQUE_COLS",
"convert_latitude",
"convert_longitude",
"AisWeb",
"get_aisw",
]

# %% ../../nbs/07_aisweb.ipynb 2
import os
import re
from typing import Iterable
from functools import cached_property
from typing import Iterable

import pandas as pd
import requests
import xmltodict
import pandas as pd
from fastcore.utils import store_attr
from dotenv import find_dotenv, load_dotenv
from fastcore.parallel import parallel
from dotenv import load_dotenv, find_dotenv
from fastcore.utils import store_attr

from .icao import map_channels

load_dotenv(find_dotenv(), override=True)
@@ -24,7 +35,8 @@
URL = "http://aisweb.decea.gov.br/api/?apiKey={}&apiPass={}&area=rotaer&rowend=10000"
TYPE = ["COM", "NAV"]
COLUMNS = ["Frequency", "Latitude", "Longitude", "Description"]
UNIQUE_COLS = ['Frequency', 'Latitude', 'Longitude']
UNIQUE_COLS = ["Frequency", "Latitude", "Longitude"]


# %% ../../nbs/07_aisweb.ipynb 6
def convert_latitude(
@@ -206,7 +218,7 @@ def records(
pause=0.1,
progress=True,
)
df = pd.concat(records).astype('string')
df = pd.concat(records).astype("string")
return map_channels(df, "AISW").drop_duplicates(UNIQUE_COLS, ignore_index=True)


(Diffs for the remaining 16 changed files are not shown.)
